Compare commits
No commits in common. "ce15e46c361862ad900da9fbceda66a570e27deb" and "398e370c4912248ae3b0c063036b8f2c08325c99" have entirely different histories.
ce15e46c36 ... 398e370c49
@@ -1,38 +0,0 @@
-# Werelogs 2.0
-
-## Current Issues
-
-Logging currently puts a heavy load on the node.js process it is called from.
-On closer inspection, the `bunyan` and `bunyan-logstash` modules do parsing,
-stringifying, decorating and object copying on top of what Werelogs is already
-doing.
-
-## Proposal
-1. Remove the bunyan and bunyan-logstash modules from Werelogs.
-2. Werelogs would log directly to STDOUT.
-
-   Logs will be in the same JSON format as we had when using bunyan and
-   bunyan-logstash. Any stacktrace printed by the process would be sent to
-   STDERR.
-3. Use Filebeat, a lightweight log shipper that can ship logs directly to
-   Logstash or Elasticsearch.
-
-   Filebeat is an agent that can attach to an individual process, so the idea
-   is to attach a Filebeat agent to each of the S3, bucketd, repd, vaultd, etc.
-   processes and read directly from STDOUT. Since it can rotate the logs on its
-   own, we can write JSON logs with rotation on the disk (if needed).
-   We can also generate SYSLOG format (if needed).
-
-
-## Work in progress
-
-1. The Werelogs#wip/2.0 branch has the bunyan modules removed, logs to STDOUT and
-   removes some object-copying overhead.
-
-## To be done
-
-1. Install Filebeat to work with MetaData and send the logs directly to Elasticsearch.
-2. Explore whether we have to go through Logstash from Filebeat for additional decoration
-   or normalization.
-3. Work on Federation to generate the required config and spawn Filebeat agents with
-   the deployed processes.
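For context on the proposal deleted above: a minimal sketch of the "write JSON lines to STDOUT and let Filebeat ship them" style it describes. The function and field names below are illustrative assumptions, not code from the repository.

'use strict';

// Emit one JSON log line per call on STDOUT so an external shipper such as
// Filebeat can tail the stream and forward it to Logstash or Elasticsearch.
// Field names are illustrative only.
function logLine(level, msg, fields) {
    const entry = Object.assign({
        name: 'werelogs',
        level,
        msg,
        time: new Date().toISOString(),
    }, fields || {});
    process.stdout.write(JSON.stringify(entry) + '\n');
}

// Stack traces would go to STDERR instead of STDOUT.
logLine('info', 'request processed', { req_id: 'abc123', elapsed_ms: 12.5 });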
@@ -1,8 +1,9 @@
 'use strict';
 
+const bunyan = require('bunyan');
+
 const LogLevel = require('./LogLevel.js');
-const SimpleLogger = require('./SimpleLogger.js');
 
 /**
  * This class is the central point of the once-only configuration system for
  * werelogs. It is instanciated once to be exported, and all configuration
@@ -97,7 +98,10 @@ class Config {
                 return stream;
             });
 
-            this.simpleLogger = new SimpleLogger( { name: 'werelogs', streams: this.streams } );
+            this.bLogger = bunyan.createLogger({
+                name: 'werelogs',
+                streams: this.streams,
+            });
         }
 
         return this;
@@ -113,7 +117,7 @@ class Config {
      * operations by the user code.
      */
     get logger() {
-        return this.simpleLogger;
+        return this.bLogger;
     }
 
     /**
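Between the Config hunks above and the Logger hunks below, a short standalone sketch of the bunyan API this change reinstates. The single stdout stream is an illustrative assumption; bunyan.createLogger and the per-level methods are bunyan's documented interface.

'use strict';

const bunyan = require('bunyan');

// Illustrative stand-in for what Config builds after this change: a named
// logger writing JSON records to one or more streams.
const log = bunyan.createLogger({
    name: 'werelogs',
    streams: [{ level: 'info', stream: process.stdout }],
});

// bunyan merges the fields object into the JSON record alongside msg.
log.info({ req_id: 'abc123' }, 'request received');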
@@ -83,8 +83,7 @@ class Logger {
 
     _doLog(levelName, msg, data) {
         const bLogger = Config.logger;
-        const finalData = new Map();
-        finalData.set('name', this.name);
+        const finalData = { name: this.name };
         if (!LogLevel.shouldLog(levelName, Config.level)) {
             return;
         }
@@ -99,7 +98,7 @@ class Logger {
         }
         if (data) {
             Object.keys(data).forEach((k) => {
-                finalData.set(k, data[k]);
+                finalData[k] = data[k];
             });
         }
         const args = [ finalData, msg ];
@@ -6,7 +6,6 @@ const LogLevel = require('./LogLevel.js');
 const Utils = require('./Utils.js');
 const serializeUids = Utils.serializeUids;
 const generateUid = Utils.generateUid;
-const copyFields = Utils.copyFields;
 
 function ensureUidValidity(uid) {
     if (uid.indexOf(':') !== -1) {
@@ -18,14 +17,16 @@ function ensureUidValidity(uid) {
 class EndLogger {
     constructor(reqLogger) {
         this.logger = reqLogger;
-        this.fields = new Map();
+        this.fields = {};
     }
 
     augmentedLog(level, msg, data) {
         assert.strictEqual(this.logger.elapsedTime, null, 'The logger\'s'
                            + 'end() wrapper should not be called more than'
                            + ' once.');
-        copyFields(this.fields, data || {});
+        // We can alter current instance, as it won't be usable after this
+        // call.
+        this.fields = Object.assign(this.fields, data || {});
         return this.logger.log(level, msg, this.fields, true);
     }
 
@@ -44,7 +45,7 @@ class EndLogger {
      */
     addDefaultFields(fields) {
         const oldFields = this.fields;
-        copyFields(this.fields, fields);
+        this.fields = Object.assign({}, this.fields, fields);
         return oldFields;
     }
 
@@ -192,7 +193,7 @@ class RequestLogger {
         this.uids = uidList || [ generateUid() ];
 
         this.entries = [];
-        this.fields = new Map();
+        this.fields = {};
         this.logLevel = logLevel;
         this.dumpThreshold = dumpThreshold;
         this.endLevel = endLevel;
@@ -247,7 +248,7 @@ class RequestLogger {
      */
     addDefaultFields(fields) {
         const oldFields = this.fields;
-        copyFields(this.fields, fields);
+        this.fields = Object.assign({}, this.fields, fields);
         return oldFields;
     }
 
@@ -432,21 +433,27 @@ class RequestLogger {
             });
             return;
         }
-        let elapsedMs = 0;
+        const fields = Object.assign({}, logFields || {});
         const endFlag = isEnd || false;
+
+        /*
+         * Even though this is added automatically by bunyan, it uses an
+         * expensive regex to figure out the native Date function. By adding
+         * timestamp here avoids that expensive call.
+         */
+        if (fields.time === undefined) {
+            fields.time = new Date();
+        }
+
+        fields.req_id = serializeUids(this.uids);
         if (endFlag) {
             this.elapsedTime = process.hrtime(this.startTime);
-            elapsedMs = this.elapsedTime[0] * 1000
+            fields.elapsed_ms = this.elapsedTime[0] * 1000
                         + this.elapsedTime[1] / 1000000;
         }
-        this.fields.set('time', new Date());
-        this.fields.set('req_id', serializeUids(this.uids));
-        this.fields.set('elapsed_ms', elapsedMs);
-        copyFields(this.fields, logFields || {});
 
         const logEntry = {
             level,
-            fields: this.fields,
+            fields,
             msg,
         };
         this.entries.push(logEntry);
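As a standalone illustration of the elapsed-time arithmetic in the hunk above: process.hrtime(start) returns [seconds, nanoseconds], which the code converts to milliseconds. Variable names here are illustrative.

'use strict';

const start = process.hrtime();

setTimeout(() => {
    // diff is [seconds, nanoseconds] elapsed since `start`.
    const diff = process.hrtime(start);
    const elapsedMs = diff[0] * 1000 + diff[1] / 1e6;
    console.log(`elapsed_ms: ${elapsedMs}`);
}, 25);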
@@ -477,9 +484,30 @@ class RequestLogger {
      * @returns {undefined}
      */
     doLogIO(logEntry) {
-        LogLevel.throwIfInvalid(logEntry.level);
+        const fields = Object.assign({}, this.fields, logEntry.fields);
 
-        this.bLogger[logEntry.level](logEntry.fields, logEntry.msg);
+        switch (logEntry.level) {
+        case 'trace':
+            this.bLogger.trace(fields, logEntry.msg);
+            break;
+        case 'debug':
+            this.bLogger.debug(fields, logEntry.msg);
+            break;
+        case 'info':
+            this.bLogger.info(fields, logEntry.msg);
+            break;
+        case 'warn':
+            this.bLogger.warn(fields, logEntry.msg);
+            break;
+        case 'error':
+            this.bLogger.error(fields, logEntry.msg);
+            break;
+        case 'fatal':
+            this.bLogger.fatal(fields, logEntry.msg);
+            break;
+        default:
+            throw new Error(`Unexpected log level: ${logEntry.level}`);
+        }
     }
 }
 
@@ -1,68 +0,0 @@
-'use strict';
-
-function isWriteableStream(s) {
-    if (!s || !s.level || !s.stream) {
-        return false;
-    }
-    // duck typing to check if the obect is a writeable stream
-    if (typeof s.stream._write !== 'function') {
-        return false;
-    }
-    return true;
-}
-
-class SimpleLogger {
-    constructor(name, streams) {
-        this.name = name;
-        this.streams = [ { level: 'info', stream: process.stdout } ];
-        if (streams) {
-            if (!Array.isArray(streams)) {
-                throw new Error('Invalid streams. streams must be an array list' +
-                    ' of writeable streams');
-            }
-
-            if (!streams.every(isWriteableStream)) {
-                throw new Error('Stream supplied is not a Writeable stream');
-            }
-            this.streams = streams;
-        }
-    }
-
-    log(level, fields, message) {
-        this.streams.forEach( s => {
-            const logEntry = Object.create(null);
-            logEntry.level = level;
-            logEntry.msg = message;
-            if (fields instanceof Map) {
-                fields.forEach((v, k) => { logEntry[k] = v; });
-            }
-            s.stream.write(JSON.stringify(logEntry) + '\n');
-        });
-    }
-
-    info(fields, message) {
-        this.log('info', fields, message);
-    }
-
-    debug(fields, message) {
-        this.log('debug', fields, message);
-    }
-
-    trace(fields, message) {
-        this.log('trace', fields, message);
-    }
-
-    warn(fields, message) {
-        this.log('warn', fields, message);
-    }
-
-    error(fields, message) {
-        this.log('error', fields, message);
-    }
-
-    fatal(fields, message) {
-        this.log('fatal', fields, message);
-    }
-}
-
-module.exports = SimpleLogger;
lib/Utils.js (14 changes)
@@ -40,22 +40,8 @@ function unserializeUids(stringdata) {
     return stringdata.split(':');
 }
 
-/**
- * This function copies the properties from the source object to the fields map
- * @param {map} fields - Map of log entry fields
- * @param {object} source - object to copy from
- * @returns {map} - map object containing the copied fields
- */
-function copyFields(fields, source) {
-    Object.keys(source).forEach(f => {
-        fields.set(f, source[f]);
-    });
-    return fields;
-}
-
 module.exports = {
     generateUid,
     serializeUids,
     unserializeUids,
-    copyFields,
 };
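The copyFields removal above pairs with the switch from Map-based fields to plain objects elsewhere in this diff; a minimal sketch contrasting the old helper with its Object.assign replacement (values are illustrative):

'use strict';

// Old approach: copy properties from a source object into a Map of fields.
function copyFields(fields, source) {
    Object.keys(source).forEach(f => fields.set(f, source[f]));
    return fields;
}
const asMap = copyFields(new Map([['name', 'werelogs']]), { req_id: 'abc123' });

// New approach: plain objects merged with Object.assign, no helper needed.
const asObject = Object.assign({ name: 'werelogs' }, { req_id: 'abc123' });

console.log(asMap.get('req_id'), asObject.req_id); // abc123 abc123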
@@ -29,7 +29,9 @@
     "url": "https://github.com/scality/werelogs/issues"
   },
   "homepage": "https://github.com/scality/werelogs#readme",
-  "dependencies": {},
+  "dependencies": {
+    "bunyan": "^1.5.1"
+  },
   "devDependencies": {
     "eslint": "^1.10.1",
     "eslint-config-airbnb": "^1.0.2",
@@ -19,36 +19,33 @@ class DummyLogger {
     }
 
     trace(obj, msg) {
-        this._doLog('trace', obj, msg);
+        this.ops.push(['trace', [obj, msg]]);
+        this.counts.trace += 1;
     }
 
     debug(obj, msg) {
-        this._doLog('debug', obj, msg);
+        this.ops.push(['debug', [obj, msg]]);
+        this.counts.debug += 1;
     }
 
     info(obj, msg) {
-        this._doLog('info', obj, msg);
+        this.ops.push(['info', [obj, msg]]);
+        this.counts.info += 1;
     }
 
     warn(obj, msg) {
-        this._doLog('warn', obj, msg);
+        this.ops.push(['warn', [obj, msg]]);
+        this.counts.warn += 1;
     }
 
     error(obj, msg) {
-        this._doLog('error', obj, msg);
+        this.ops.push(['error', [obj, msg]]);
+        this.counts.error += 1;
     }
 
     fatal(obj, msg) {
-        this._doLog('fatal', obj, msg);
-    }
-
-    _doLog(level, obj, msg) {
-        const fields = Object.create(null);
-        if (obj instanceof Map) {
-            obj.forEach((v, k) => fields[k] = v);
-        }
-        this.ops.push([ level, [ fields, msg]]);
-        this.counts[level] += 1;
+        this.ops.push(['fatal', [obj, msg]]);
+        this.counts.fatal += 1;
     }
 }
 
@@ -1,6 +1,8 @@
 const assert = require('assert');
+const bunyan = require('bunyan');
 
 const Config = require('../../lib/Config.js');
+const logBuffer = new bunyan.RingBuffer({ limit: 1 });
 
 describe('Config', () => {
     beforeEach(() => {
@@ -38,6 +40,15 @@ describe('Config', () => {
         done();
     });
 
+    it('streams should be updateable', (done) => {
+        const origStreams = Config.streams;
+        const origLogger = Config.logger;
+        Config.update({ streams: [{ type: 'raw', stream: logBuffer }] });
+        assert.notStrictEqual(origStreams, Config.streams, 'Expected Config\'s streams to have been updated.');
+        assert.notStrictEqual(origLogger, Config.Logger, 'Expected Config\'s logger to have been replaced by update.');
+        done();
+    });
+
     it('should not be modified by an empty config object', (done) => {
         const origLevel = Config.level;
         const origDump = Config.dump;
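The new test above relies on bunyan's RingBuffer, a raw stream that keeps the most recent log records in memory; a small illustrative sketch of capturing a record for an assertion (the logger setup here is an assumption, not the test's own code):

'use strict';

const assert = require('assert');
const bunyan = require('bunyan');

// A raw stream receives parsed record objects instead of serialized strings.
const ringBuffer = new bunyan.RingBuffer({ limit: 1 });
const log = bunyan.createLogger({
    name: 'example',
    streams: [{ type: 'raw', level: 'info', stream: ringBuffer }],
});

log.info({ req_id: 'abc123' }, 'captured in memory');
assert.strictEqual(ringBuffer.records[0].req_id, 'abc123');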
@@ -27,7 +27,7 @@ function filterGenerator(logLevel, callLevel) {
      * Here, patch the Config by setting a specificly designed dummyLogger
      * for testing purposes that will help us collect runtime data.
      */
-    Config.simpleLogger = dummyLogger;
+    Config.bLogger = dummyLogger;
 
     return logger;
 }
@@ -174,7 +174,7 @@ describe('WereLogs Logger is usable:', () => {
     it('Uses the additional fields as expected', (done) => {
         const dummyLogger = new DummyLogger();
         const logger = new Logger('test');
-        Config.simpleLogger = dummyLogger;
+        Config.bLogger = dummyLogger;
         const fields = {
             ip: '127.0.0.1',
             method: 'GET',
@@ -195,7 +195,7 @@ describe('WereLogs Logger is usable:', () => {
     ];
     function createMisusableLogger(dummyLogger) {
         const logger = new Logger('test');
-        Config.simpleLogger = dummyLogger;
+        Config.bLogger = dummyLogger;
         return logger;
     }
 
@@ -5,7 +5,6 @@ const Utils = require('../../lib/Utils.js');
 const generateUid = Utils.generateUid;
 const serializeUids = Utils.serializeUids;
 const unserializeUids = Utils.unserializeUids;
-const copyFields = Utils.copyFields;
 
 describe('Utils: generateUid', () => {
     it('generates a string-typed ID', (done) => {
|
||||||
done();
|
done();
|
||||||
});
|
});
|
||||||
});
|
});
|
||||||
|
|
||||||
describe('Utils: copyFields', () => {
|
|
||||||
it('copies all the properties from source to target object', (done) => {
|
|
||||||
const target = new Map();
|
|
||||||
target.set('foo', 'bar');
|
|
||||||
const source = { id: 1, name: 'demo', value: { a: 1, b: 2, c: 3 } };
|
|
||||||
const result = new Map();
|
|
||||||
result.set('foo', 'bar');
|
|
||||||
result.set('id', 1);
|
|
||||||
result.set('name', 'demo');
|
|
||||||
result.set('value', { a: 1, b: 2, c: 3 });
|
|
||||||
copyFields(target, source);
|
|
||||||
assert.deepStrictEqual(target, result, 'target should have the same properties as source');
|
|
||||||
done();
|
|
||||||
});
|
|
||||||
});
|
|
||||||
|
|