Compare commits


No commits in common. "95822076e2477906f6031a6f82351c906d04f696" and "398e370c4912248ae3b0c063036b8f2c08325c99" have entirely different histories.

13 changed files with 115 additions and 256 deletions

View File

@@ -1,38 +0,0 @@
# Werelogs 2.0
## Current Issues
Logging currently puts a heavy load on the Node.js process it is called from.
Upon closer inspection, the `bunyan` and `bunyan-logstash` modules do their own
parsing, stringifying, decorating and object copying on top of what Werelogs is
already doing.
## Proposal
1. Remove the bunyan and bunyan-logstash modules from Werelogs.
2. Have Werelogs log directly to STDOUT (see the sketch after this list).
Logs will keep the same JSON format we had when using bunyan and
bunyan-logstash. Any stacktrace printed by the process will be sent to
STDERR.
3. Use Filebeat, a lightweight log shipper that can ship logs directly to
Logstash or Elasticsearch.
Filebeat is an agent that can attach to an individual process, so the idea
is to attach a Filebeat agent to each S3, bucketd, repd, vaultd, etc.
process and read directly from its STDOUT. Since Filebeat can rotate logs
on its own, we can also write JSON logs to disk with rotation (if needed).
We can also generate SYSLOG format (if needed).
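
As a rough illustration of points 2 and 3, a minimal sketch of one-JSON-line-per-entry
logging to STDOUT (with stacktraces routed to STDERR) could look like the following.
The `logToStdout` helper and the exact field names are assumptions for illustration
only, not the actual Werelogs 2.0 implementation:

```js
'use strict';

// Hypothetical helper, not the real Werelogs code: write one flat JSON object
// per log entry as a single newline-terminated line, so a shipper such as
// Filebeat can consume the process's STDOUT directly.
function logToStdout(level, name, fields, message) {
    const entry = Object.assign({}, fields, {
        level,
        name,
        time: Date.now(),
        message,
    });
    process.stdout.write(JSON.stringify(entry) + '\n');
}

// Stacktraces go to STDERR, as proposed, so they do not corrupt the JSON stream.
process.on('uncaughtException', err => {
    process.stderr.write(err.stack + '\n');
    process.exit(1);
});

// Example usage:
logToStdout('info', 'werelogs-demo', { req_id: 'abc123' }, 'request served');
```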
## Work in progress
1. The Werelogs#wip/2.0 branch removes the bunyan modules, logs to STDOUT, and
removes some of the object-copying overhead.
## To be done
1. Install Filebeat to work with MetaData and send the logs directly to Elasticsearch.
2. Explore whether we have to go through Logstash from Filebeat for additional decoration
or normalization.
3. Work on Federation to generate the required config and to spawn Filebeat agents
alongside the deployed processes.

View File

@@ -1,8 +1,9 @@
'use strict'; 'use strict';
const bunyan = require('bunyan');
const LogLevel = require('./LogLevel.js'); const LogLevel = require('./LogLevel.js');
const SimpleLogger = require('./SimpleLogger.js');
/** /**
* This class is the central point of the once-only configuration system for * This class is the central point of the once-only configuration system for
* werelogs. It is instanciated once to be exported, and all configuration * werelogs. It is instanciated once to be exported, and all configuration
@@ -97,7 +98,10 @@ class Config {
return stream; return stream;
}); });
this.simpleLogger = new SimpleLogger( 'werelogs', this.streams ); this.bLogger = bunyan.createLogger({
name: 'werelogs',
streams: this.streams,
});
} }
return this; return this;
@@ -113,7 +117,7 @@ class Config {
* operations by the user code. * operations by the user code.
*/ */
get logger() { get logger() {
return this.simpleLogger; return this.bLogger;
} }
/** /**

View File

@@ -83,8 +83,7 @@ class Logger {
_doLog(levelName, msg, data) { _doLog(levelName, msg, data) {
const bLogger = Config.logger; const bLogger = Config.logger;
const finalData = new Map(); const finalData = { name: this.name };
finalData.set('name', this.name);
if (!LogLevel.shouldLog(levelName, Config.level)) { if (!LogLevel.shouldLog(levelName, Config.level)) {
return; return;
} }
@@ -99,7 +98,7 @@ class Logger {
} }
if (data) { if (data) {
Object.keys(data).forEach((k) => { Object.keys(data).forEach((k) => {
finalData.set(k, data[k]); finalData[k] = data[k];
}); });
} }
const args = [ finalData, msg ]; const args = [ finalData, msg ];

View File

@@ -6,7 +6,6 @@ const LogLevel = require('./LogLevel.js');
const Utils = require('./Utils.js'); const Utils = require('./Utils.js');
const serializeUids = Utils.serializeUids; const serializeUids = Utils.serializeUids;
const generateUid = Utils.generateUid; const generateUid = Utils.generateUid;
const copyFields = Utils.copyFields;
function ensureUidValidity(uid) { function ensureUidValidity(uid) {
if (uid.indexOf(':') !== -1) { if (uid.indexOf(':') !== -1) {
@@ -18,18 +17,16 @@ function ensureUidValidity(uid) {
class EndLogger { class EndLogger {
constructor(reqLogger) { constructor(reqLogger) {
this.logger = reqLogger; this.logger = reqLogger;
this.fields = Object.create(null); this.fields = {};
} }
augmentedLog(level, msg, data) { augmentedLog(level, msg, data) {
assert.strictEqual(this.logger.elapsedTime, null, 'The logger\'s' assert.strictEqual(this.logger.elapsedTime, null, 'The logger\'s'
+ 'end() wrapper should not be called more than' + 'end() wrapper should not be called more than'
+ ' once.'); + ' once.');
if (data) { // We can alter current instance, as it won't be usable after this
Object.keys(data).forEach(k => { // call.
this.fields[k] = data[k]; this.fields = Object.assign(this.fields, data || {});
});
}
return this.logger.log(level, msg, this.fields, true); return this.logger.log(level, msg, this.fields, true);
} }
@@ -48,11 +45,7 @@ class EndLogger {
*/ */
addDefaultFields(fields) { addDefaultFields(fields) {
const oldFields = this.fields; const oldFields = this.fields;
if (fields) { this.fields = Object.assign({}, this.fields, fields);
Object.keys(fields).forEach((k) => {
this.fields[k] = fields[k];
});
}
return oldFields; return oldFields;
} }
@@ -200,7 +193,7 @@ class RequestLogger {
this.uids = uidList || [ generateUid() ]; this.uids = uidList || [ generateUid() ];
this.entries = []; this.entries = [];
this.fields = new Map(); this.fields = {};
this.logLevel = logLevel; this.logLevel = logLevel;
this.dumpThreshold = dumpThreshold; this.dumpThreshold = dumpThreshold;
this.endLevel = endLevel; this.endLevel = endLevel;
@@ -255,7 +248,7 @@ class RequestLogger {
*/ */
addDefaultFields(fields) { addDefaultFields(fields) {
const oldFields = this.fields; const oldFields = this.fields;
copyFields(this.fields, fields); this.fields = Object.assign({}, this.fields, fields);
return oldFields; return oldFields;
} }
@@ -440,22 +433,24 @@ class RequestLogger {
}); });
return; return;
} }
let elapsedMs = 0; const fields = Object.assign({}, logFields || {});
const endFlag = isEnd || false; const endFlag = isEnd || false;
/*
* Even though this is added automatically by bunyan, it uses an
* expensive regex to figure out the native Date function. By adding
* timestamp here avoids that expensive call.
*/
if (fields.time === undefined) {
fields.time = new Date();
}
fields.req_id = serializeUids(this.uids);
if (endFlag) { if (endFlag) {
this.elapsedTime = process.hrtime(this.startTime); this.elapsedTime = process.hrtime(this.startTime);
elapsedMs = this.elapsedTime[0] * 1000 fields.elapsed_ms = this.elapsedTime[0] * 1000
+ this.elapsedTime[1] / 1000000; + this.elapsedTime[1] / 1000000;
} }
const fields = new Map();
// Date.now() consumes less cpu ticks than new Date()
fields.set('time', Date.now());
fields.set('req_id', serializeUids(this.uids));
fields.set('elapsed_ms', elapsedMs);
this.fields.forEach((v, k) => {
fields.set(k, v);
});
copyFields(fields, logFields || {});
const logEntry = { const logEntry = {
level, level,
fields, fields,
@@ -464,10 +459,9 @@ class RequestLogger {
this.entries.push(logEntry); this.entries.push(logEntry);
if (LogLevel.shouldLog(level, this.dumpThreshold)) { if (LogLevel.shouldLog(level, this.dumpThreshold)) {
for (let i = 0; i < this.entries.length; i++) { this.entries.forEach((entry) => {
const entry = this.entries[i];
this.doLogIO(entry); this.doLogIO(entry);
} });
this.entries = []; this.entries = [];
} else if (LogLevel.shouldLog(level, this.logLevel)) { } else if (LogLevel.shouldLog(level, this.logLevel)) {
this.doLogIO(logEntry); this.doLogIO(logEntry);
@@ -490,9 +484,30 @@ class RequestLogger {
* @returns {undefined} * @returns {undefined}
*/ */
doLogIO(logEntry) { doLogIO(logEntry) {
LogLevel.throwIfInvalid(logEntry.level); const fields = Object.assign({}, this.fields, logEntry.fields);
this.bLogger[logEntry.level](logEntry.fields, logEntry.msg); switch (logEntry.level) {
case 'trace':
this.bLogger.trace(fields, logEntry.msg);
break;
case 'debug':
this.bLogger.debug(fields, logEntry.msg);
break;
case 'info':
this.bLogger.info(fields, logEntry.msg);
break;
case 'warn':
this.bLogger.warn(fields, logEntry.msg);
break;
case 'error':
this.bLogger.error(fields, logEntry.msg);
break;
case 'fatal':
this.bLogger.fatal(fields, logEntry.msg);
break;
default:
throw new Error(`Unexpected log level: ${logEntry.level}`);
}
} }
} }

View File

@@ -1,63 +0,0 @@
'use strict';
function isWriteableStream(s) {
if (!s.stream) {
return false;
}
// duck typing to check if the obect is a writeable stream
return s.stream.writable;
}
class SimpleLogger {
constructor(name, streams) {
this.name = name;
this.streams = [ { level: 'info', stream: process.stdout } ];
if (streams) {
if (!Array.isArray(streams)) {
throw new Error('Invalid streams. streams must be an array list' +
' of writeable streams');
}
this.streams = streams.filter(isWriteableStream);
}
}
log(level, fields, message) {
for (let i = 0; i < this.streams.length; i++) {
const s = this.streams[i];
const logEntry = Object.create(null);
logEntry.level = level;
logEntry.message = message;
if (fields instanceof Map) {
fields.forEach((v, k) => { logEntry[k] = v; });
}
s.stream.write(JSON.stringify(logEntry) + '\n');
}
}
info(fields, message) {
this.log('info', fields, message);
}
debug(fields, message) {
this.log('debug', fields, message);
}
trace(fields, message) {
this.log('trace', fields, message);
}
warn(fields, message) {
this.log('warn', fields, message);
}
error(fields, message) {
this.log('error', fields, message);
}
fatal(fields, message) {
this.log('fatal', fields, message);
}
}
module.exports = SimpleLogger;

View File

@@ -25,16 +25,7 @@ function generateUid() {
* *
*/ */
function serializeUids(uidList) { function serializeUids(uidList) {
// return uidList.join(':'); return uidList.join(':');
let res = '';
const uidLen = uidList.length;
for (let i = 0; i < uidLen; i++) {
res += uidList[i];
if (i !== uidLen - 1) {
res += ':';
}
}
return res;
} }
/** /**
@@ -49,24 +40,8 @@ function unserializeUids(stringdata) {
return stringdata.split(':'); return stringdata.split(':');
} }
/**
* This function copies the properties from the source object to the fields map
* @param {map} fields - Map of log entry fields
* @param {object} source - object to copy from
* @returns {map} - map object containing the copied fields
*/
function copyFields(fields, source) {
const sourceArr = Object.keys(source);
for (let i = 0; i < sourceArr.length; i++) {
const key = sourceArr[i];
fields.set(key, source[key]);
}
return fields;
}
module.exports = { module.exports = {
generateUid, generateUid,
serializeUids, serializeUids,
unserializeUids, unserializeUids,
copyFields,
}; };

View File

@@ -29,7 +29,9 @@
"url": "https://github.com/scality/werelogs/issues" "url": "https://github.com/scality/werelogs/issues"
}, },
"homepage": "https://github.com/scality/werelogs#readme", "homepage": "https://github.com/scality/werelogs#readme",
"dependencies": {}, "dependencies": {
"bunyan": "^1.5.1"
},
"devDependencies": { "devDependencies": {
"eslint": "^1.10.1", "eslint": "^1.10.1",
"eslint-config-airbnb": "^1.0.2", "eslint-config-airbnb": "^1.0.2",

View File

@@ -19,36 +19,33 @@ class DummyLogger {
} }
trace(obj, msg) { trace(obj, msg) {
this._doLog('trace', obj, msg); this.ops.push(['trace', [obj, msg]]);
this.counts.trace += 1;
} }
debug(obj, msg) { debug(obj, msg) {
this._doLog('debug', obj, msg); this.ops.push(['debug', [obj, msg]]);
this.counts.debug += 1;
} }
info(obj, msg) { info(obj, msg) {
this._doLog('info', obj, msg); this.ops.push(['info', [obj, msg]]);
this.counts.info += 1;
} }
warn(obj, msg) { warn(obj, msg) {
this._doLog('warn', obj, msg); this.ops.push(['warn', [obj, msg]]);
this.counts.warn += 1;
} }
error(obj, msg) { error(obj, msg) {
this._doLog('error', obj, msg); this.ops.push(['error', [obj, msg]]);
this.counts.error += 1;
} }
fatal(obj, msg) { fatal(obj, msg) {
this._doLog('fatal', obj, msg); this.ops.push(['fatal', [obj, msg]]);
} this.counts.fatal += 1;
_doLog(level, obj, msg) {
const fields = Object.create(null);
if (obj instanceof Map) {
obj.forEach((v, k) => fields[k] = v);
}
this.ops.push([ level, [ fields, msg]]);
this.counts[level] += 1;
} }
} }

View File

@@ -1,47 +1,30 @@
'use strict';
const assert = require('assert'); const assert = require('assert');
const PassThrough = require('stream').PassThrough; const bunyan = require('bunyan');
const pass = new PassThrough;
const Logger = require('werelogs'); const Logger = require('werelogs');
const logBuffer = { const logBuffer = new bunyan.RingBuffer({ limit: 1 });
records: [],
};
pass.on('data', data => {
logBuffer.records.push(data.toString());
});
function createModuleLogger() { function createModuleLogger() {
return new Logger('FT-test', { return new Logger('FT-test', {
level: 'info', level: 'info',
dump: 'error', dump: 'error',
streams: [ { streams: [{
stream: pass,
type: 'raw', type: 'raw',
} ], stream: logBuffer,
}],
}); });
} }
function checkFields(fields) { function checkFields(fields) {
Object.keys(fields).forEach((k) => { Object.keys(fields).forEach((k) => {
if (fields.hasOwnProperty(k)) { if (fields.hasOwnProperty(k)) {
const record = JSON.parse(logBuffer.records[0].trim()); assert.deepStrictEqual(logBuffer.records[0][k], fields[k]);
assert.deepStrictEqual(record[k], fields[k]);
} }
}); });
} }
function parseLogEntry() {
return JSON.parse(logBuffer.records[0].trim());
}
describe('Werelogs is usable as a dependency', () => { describe('Werelogs is usable as a dependency', () => {
describe('Usage of the ModuleLogger', () => { describe('Usage of the ModuleLogger', () => {
afterEach(() => {
logBuffer.records = [];
});
it('Should be able to create a logger', (done) => { it('Should be able to create a logger', (done) => {
assert.doesNotThrow( assert.doesNotThrow(
createModuleLogger, createModuleLogger,
@@ -55,7 +38,7 @@ describe('Werelogs is usable as a dependency', () => {
const logger = createModuleLogger(); const logger = createModuleLogger();
const msg = 'This is a simple message'; const msg = 'This is a simple message';
logger.info(msg); logger.info(msg);
assert.strictEqual(parseLogEntry().message, msg); assert.strictEqual(logBuffer.records[0].msg, msg);
done(); done();
}); });
@@ -64,16 +47,13 @@ describe('Werelogs is usable as a dependency', () => {
const msg = 'This is a message with added fields'; const msg = 'This is a message with added fields';
const fields = { errorCode: 9, description: 'TestError', options: { dump: false } }; const fields = { errorCode: 9, description: 'TestError', options: { dump: false } };
logger.info(msg, fields); logger.info(msg, fields);
assert.strictEqual(parseLogEntry().message, msg); assert.strictEqual(logBuffer.records[0].msg, msg);
checkFields(fields); checkFields(fields);
done(); done();
}); });
}); });
describe('Usage of the RequestLogger', () => { describe('Usage of the RequestLogger', () => {
afterEach(() => {
logBuffer.records = [];
});
it('Should be able to create a logger', (done) => { it('Should be able to create a logger', (done) => {
assert.doesNotThrow( assert.doesNotThrow(
() => { () => {
@@ -89,7 +69,7 @@ describe('Werelogs is usable as a dependency', () => {
const logger = createModuleLogger().newRequestLogger(); const logger = createModuleLogger().newRequestLogger();
const msg = 'This is a simple message'; const msg = 'This is a simple message';
logger.info(msg); logger.info(msg);
assert.strictEqual(parseLogEntry().message, msg); assert.strictEqual(logBuffer.records[0].msg, msg);
done(); done();
}); });
@@ -98,7 +78,7 @@ describe('Werelogs is usable as a dependency', () => {
const msg = 'This is a message with added fields'; const msg = 'This is a message with added fields';
const fields = { errorCode: 9, description: 'TestError', options: { dump: false } }; const fields = { errorCode: 9, description: 'TestError', options: { dump: false } };
logger.info(msg, fields); logger.info(msg, fields);
assert.strictEqual(parseLogEntry().message, msg); assert.strictEqual(logBuffer.records[0].msg, msg);
checkFields(fields); checkFields(fields);
done(); done();
}); });

View File

@@ -1,5 +1,6 @@
const assert = require('assert'); const assert = require('assert');
const PassThrough = require('stream').PassThrough;
const bunyan = require('bunyan');
const Werelogs = require('werelogs'); const Werelogs = require('werelogs');
const modules = [ const modules = [
@@ -7,46 +8,39 @@ const modules = [
require('./module2.js'), require('./module2.js'),
require('./module3.js'), require('./module3.js'),
]; ];
const pass = new PassThrough;
const logBuffer = {
records: [],
};
pass.on('data', data => {
logBuffer.records.push(JSON.parse(data.toString().trim()));
});
describe('Config is shared and unique within one program', () => { describe('Config is shared and unique within one program', () => {
it('should find all log entries in the RingBuffer with the right module name', (done) => { it('should find all log entries in the RingBuffer with the right module name', (done) => {
const rBuffer = new bunyan.RingBuffer({ limit: 15 });
const log = new Werelogs('test-index', { const log = new Werelogs('test-index', {
level: 'debug', level: 'debug',
dump: 'fatal', dump: 'fatal',
streams: [{ streams: [{
type: 'raw', type: 'raw',
stream: pass, stream: rBuffer,
}], }],
}); });
modules.forEach((mod) => { mod(); }); modules.forEach((mod) => { mod(); });
log.warn('Logging as warn'); log.warn('Logging as warn');
const rLog = log.newRequestLogger(); const rLog = log.newRequestLogger();
rLog.info('Logging request as info'); rLog.info('Logging request as info');
assert.deepStrictEqual(logBuffer.records.length, 5, 'Expected to see 5 log entries in the ring buffer.'); assert.deepStrictEqual(rBuffer.records.length, 5, 'Expected to see 5 log entries in the ring buffer.');
assert.deepStrictEqual(logBuffer.records[0].message, 'Logging as info'); assert.deepStrictEqual(rBuffer.records[0].msg, 'Logging as info');
assert.deepStrictEqual(logBuffer.records[0].name, 'test-mod1'); assert.deepStrictEqual(rBuffer.records[0].name, 'test-mod1');
assert.deepStrictEqual(logBuffer.records[0].level, 'info'); assert.deepStrictEqual(rBuffer.records[0].level, 30);
assert.deepStrictEqual(logBuffer.records[1].message, 'Logging as debug'); assert.deepStrictEqual(rBuffer.records[1].msg, 'Logging as debug');
assert.deepStrictEqual(logBuffer.records[1].name, 'test-mod2'); assert.deepStrictEqual(rBuffer.records[1].name, 'test-mod2');
assert.deepStrictEqual(logBuffer.records[1].level, 'debug'); assert.deepStrictEqual(rBuffer.records[1].level, 20);
assert.deepStrictEqual(logBuffer.records[2].message, 'Logging as error'); assert.deepStrictEqual(rBuffer.records[2].msg, 'Logging as error');
assert.deepStrictEqual(logBuffer.records[2].name, 'test-mod3'); assert.deepStrictEqual(rBuffer.records[2].name, 'test-mod3');
assert.deepStrictEqual(logBuffer.records[2].level, 'error'); assert.deepStrictEqual(rBuffer.records[2].level, 50);
assert.deepStrictEqual(logBuffer.records[3].message, 'Logging as warn'); assert.deepStrictEqual(rBuffer.records[3].msg, 'Logging as warn');
assert.deepStrictEqual(logBuffer.records[3].name, 'test-index'); assert.deepStrictEqual(rBuffer.records[3].name, 'test-index');
assert.deepStrictEqual(logBuffer.records[3].level, 'warn'); assert.deepStrictEqual(rBuffer.records[3].level, 40);
assert.deepStrictEqual(logBuffer.records[4].message, 'Logging request as info'); assert.deepStrictEqual(rBuffer.records[4].msg, 'Logging request as info');
assert.deepStrictEqual(logBuffer.records[4].name, 'test-index'); assert.deepStrictEqual(rBuffer.records[4].name, 'test-index');
assert.deepStrictEqual(logBuffer.records[4].level, 'info'); assert.deepStrictEqual(rBuffer.records[4].level, 30);
assert.notStrictEqual(logBuffer.records[4].req_id, undefined); assert.notStrictEqual(rBuffer.records[4].req_id, undefined);
done(); done();
}); });
}); });

View File

@@ -1,6 +1,8 @@
const assert = require('assert'); const assert = require('assert');
const bunyan = require('bunyan');
const Config = require('../../lib/Config.js'); const Config = require('../../lib/Config.js');
const logBuffer = new bunyan.RingBuffer({ limit: 1 });
describe('Config', () => { describe('Config', () => {
beforeEach(() => { beforeEach(() => {
@@ -38,6 +40,15 @@ describe('Config', () => {
done(); done();
}); });
it('streams should be updateable', (done) => {
const origStreams = Config.streams;
const origLogger = Config.logger;
Config.update({ streams: [{ type: 'raw', stream: logBuffer }] });
assert.notStrictEqual(origStreams, Config.streams, 'Expected Config\'s streams to have been updated.');
assert.notStrictEqual(origLogger, Config.Logger, 'Expected Config\'s logger to have been replaced by update.');
done();
});
it('should not be modified by an empty config object', (done) => { it('should not be modified by an empty config object', (done) => {
const origLevel = Config.level; const origLevel = Config.level;
const origDump = Config.dump; const origDump = Config.dump;

View File

@@ -27,7 +27,7 @@ function filterGenerator(logLevel, callLevel) {
* Here, patch the Config by setting a specificly designed dummyLogger * Here, patch the Config by setting a specificly designed dummyLogger
* for testing purposes that will help us collect runtime data. * for testing purposes that will help us collect runtime data.
*/ */
Config.simpleLogger = dummyLogger; Config.bLogger = dummyLogger;
return logger; return logger;
} }
@@ -174,7 +174,7 @@ describe('WereLogs Logger is usable:', () => {
it('Uses the additional fields as expected', (done) => { it('Uses the additional fields as expected', (done) => {
const dummyLogger = new DummyLogger(); const dummyLogger = new DummyLogger();
const logger = new Logger('test'); const logger = new Logger('test');
Config.simpleLogger = dummyLogger; Config.bLogger = dummyLogger;
const fields = { const fields = {
ip: '127.0.0.1', ip: '127.0.0.1',
method: 'GET', method: 'GET',
@@ -195,7 +195,7 @@ describe('WereLogs Logger is usable:', () => {
]; ];
function createMisusableLogger(dummyLogger) { function createMisusableLogger(dummyLogger) {
const logger = new Logger('test'); const logger = new Logger('test');
Config.simpleLogger = dummyLogger; Config.bLogger = dummyLogger;
return logger; return logger;
} }

View File

@@ -5,7 +5,6 @@ const Utils = require('../../lib/Utils.js');
const generateUid = Utils.generateUid; const generateUid = Utils.generateUid;
const serializeUids = Utils.serializeUids; const serializeUids = Utils.serializeUids;
const unserializeUids = Utils.unserializeUids; const unserializeUids = Utils.unserializeUids;
const copyFields = Utils.copyFields;
describe('Utils: generateUid', () => { describe('Utils: generateUid', () => {
it('generates a string-typed ID', (done) => { it('generates a string-typed ID', (done) => {
@@ -43,19 +42,3 @@ describe('Utils: serializeUids', () => {
done(); done();
}); });
}); });
describe('Utils: copyFields', () => {
it('copies all the properties from source to target object', (done) => {
const target = new Map();
target.set('foo', 'bar');
const source = { id: 1, name: 'demo', value: { a: 1, b: 2, c: 3 } };
const result = new Map();
result.set('foo', 'bar');
result.set('id', 1);
result.set('name', 'demo');
result.set('value', { a: 1, b: 2, c: 3 });
copyFields(target, source);
assert.deepStrictEqual(target, result, 'target should have the same properties as source');
done();
});
});