Compare commits

...

6 Commits

Author SHA1 Message Date
Rahul Padigela 95822076e2 perf: simple for loop in favor of foreach 2016-05-27 18:53:54 -07:00
Rahul Padigela 50e23152e2 perf: use Date.now() for timestamp generation
V8 profiling revealed that under load new Date() consumes a ton of
cpu ticks, using Date.now() is ~33% faster. The idea is filebeat or
logstash component would parse this long int timestamp into ISO format.
2016-05-27 18:53:54 -07:00
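The gist of this change, as a minimal sketch (the helper names are illustrative, not part of werelogs): Date.now() yields a plain epoch-milliseconds number without allocating a Date object, and the ISO rendering can be deferred to the log shipper.

```js
'use strict';

// Hot path: emit a numeric epoch-ms timestamp instead of allocating a Date.
function timestampField() {
    return Date.now(); // e.g. 1464399234567
}

// Downstream (filebeat/logstash or any other consumer) can render ISO-8601 later.
function toIso(epochMs) {
    return new Date(epochMs).toISOString(); // e.g. '2016-05-28T01:33:54.567Z'
}

console.log(timestampField(), toIso(Date.now()));
```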
Rahul Padigela 87419bfe7d FT: STDOUT logging
Introduces SimpleLogger interface which writes the logs in JSON
to STDOUT

Fix #60
2016-05-27 18:53:48 -07:00
Rahul Padigela bcaa7d3805 cleanup: Remove bunyan, bunyan-logstash dependencies
Per 2.0 design, we are removing these modules in favor of STDOUT
logging.
2016-05-26 18:22:32 -07:00
Rahul Padigela c1df2a57c3 cleanup: use loglevel utility 2016-05-26 18:22:32 -07:00
Rahul Padigela 4841555211 perf: use Map instead of Object for fields
Since the log object's properties get set, updated, deleted a lot
the object qualifies as hot object/hash table mode, making it less
performant. Maps fit in this situation as they enable the usage of
hash maps efficiently.
2016-05-26 18:22:32 -07:00
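A rough illustration of the rationale in this commit (the field names are made up): a plain object that keeps gaining and losing properties tends to fall back to dictionary mode in V8, whereas a Map is built for frequent insert/update/delete of arbitrary keys.

```js
'use strict';

// Plain-object style (before): dynamic adds/deletes push the object into
// V8's slower dictionary (hash table) representation.
const objFields = { name: 'werelogs' };
objFields.req_id = 'aaaa:bbbb';
delete objFields.req_id;

// Map style (after): designed for churn on arbitrary string keys.
const mapFields = new Map();
mapFields.set('name', 'werelogs');
mapFields.set('req_id', 'aaaa:bbbb');
mapFields.delete('req_id');

// A Map still needs to be flattened before JSON serialization.
const flat = {};
mapFields.forEach((v, k) => { flat[k] = v; });
console.log(JSON.stringify(flat)); // {"name":"werelogs"}
```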
13 changed files with 256 additions and 115 deletions

38
DESIGN-2.0.md Normal file
View File

@ -0,0 +1,38 @@
# Werelogs 2.0
## Current Issues
Logging currently puts a heavy load on the Node.js process it is called from.
Upon closer inspection, the `bunyan` and `bunyan-logstash` modules do parsing,
stringifying, decorating and object copying on top of what Werelogs is already
doing.
## Proposal
1. Remove the bunyan and bunyan-logstash modules from Werelogs.
2. Have Werelogs log directly to STDOUT.
Logs will be in the same JSON format as we had when using bunyan and
bunyan-logstash (see the sketch after this document). Any stack trace
printed by the process would be sent to STDERR.
3. Use Filebeat, a lightweight log shipper that can ship logs directly to
Logstash or Elasticsearch.
Filebeat is an agent that can attach to an individual process, so the idea
is to attach a Filebeat agent to each of the S3, bucketd, repd, vaultd, etc.
processes and read directly from STDOUT. Since it can rotate the logs on its
own, we can write JSON logs with rotation on the disk (if needed).
We can also generate SYSLOG format (if needed).
## Work in progress
1. The Werelogs#wip/2.0 branch has the bunyan modules removed, logs to STDOUT and
removes some object-copying overhead.
## To be done
1. Install Filebeat to work with MetaData and send the logs directly to Elasticsearch.
2. Explore whether we have to go through Logstash from Filebeat for additional decoration
or normalization.
3. Work on Federation to generate the required config and spawn Filebeat agents with
the deployed processes.
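As a rough sketch of what item 2 of the proposal means in practice (the field values below are invented for illustration; only time, req_id, elapsed_ms, name, level and message appear in this changeset): each log entry is one JSON document per line on STDOUT, ready for Filebeat to tail, while stack traces keep going to STDERR.

```js
'use strict';

// One JSON log entry per line on STDOUT; `time` stays a raw epoch-ms integer
// so that Filebeat/Logstash can turn it into ISO format downstream.
const entry = {
    level: 'info',
    message: 'routed request',
    name: 'S3',
    time: Date.now(),
    req_id: '3d4e6a7b8c9d0e1f:aa11bb22cc33dd44',
    elapsed_ms: 12.345,
};
process.stdout.write(JSON.stringify(entry) + '\n');

// Stack traces printed by the process are sent to STDERR instead.
process.stderr.write(new Error('example failure').stack + '\n');
```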

View File

@ -1,9 +1,8 @@
'use strict';
const bunyan = require('bunyan');
const LogLevel = require('./LogLevel.js');
const SimpleLogger = require('./SimpleLogger.js');
/**
* This class is the central point of the once-only configuration system for
* werelogs. It is instantiated once to be exported, and all configuration
@ -98,10 +97,7 @@ class Config {
return stream;
});
this.bLogger = bunyan.createLogger({
name: 'werelogs',
streams: this.streams,
});
this.simpleLogger = new SimpleLogger( 'werelogs', this.streams );
}
return this;
@ -117,7 +113,7 @@ class Config {
* operations by the user code.
*/
get logger() {
return this.bLogger;
return this.simpleLogger;
}
/**

View File

@ -83,7 +83,8 @@ class Logger {
_doLog(levelName, msg, data) {
const bLogger = Config.logger;
const finalData = { name: this.name };
const finalData = new Map();
finalData.set('name', this.name);
if (!LogLevel.shouldLog(levelName, Config.level)) {
return;
}
@ -98,7 +99,7 @@ class Logger {
}
if (data) {
Object.keys(data).forEach((k) => {
finalData[k] = data[k];
finalData.set(k, data[k]);
});
}
const args = [ finalData, msg ];

View File

@ -6,6 +6,7 @@ const LogLevel = require('./LogLevel.js');
const Utils = require('./Utils.js');
const serializeUids = Utils.serializeUids;
const generateUid = Utils.generateUid;
const copyFields = Utils.copyFields;
function ensureUidValidity(uid) {
if (uid.indexOf(':') !== -1) {
@ -17,16 +18,18 @@ function ensureUidValidity(uid) {
class EndLogger {
constructor(reqLogger) {
this.logger = reqLogger;
this.fields = {};
this.fields = Object.create(null);
}
augmentedLog(level, msg, data) {
assert.strictEqual(this.logger.elapsedTime, null, 'The logger\'s'
+ 'end() wrapper should not be called more than'
+ ' once.');
// We can alter current instance, as it won't be usable after this
// call.
this.fields = Object.assign(this.fields, data || {});
if (data) {
Object.keys(data).forEach(k => {
this.fields[k] = data[k];
});
}
return this.logger.log(level, msg, this.fields, true);
}
@ -45,7 +48,11 @@ class EndLogger {
*/
addDefaultFields(fields) {
const oldFields = this.fields;
this.fields = Object.assign({}, this.fields, fields);
if (fields) {
Object.keys(fields).forEach((k) => {
this.fields[k] = fields[k];
});
}
return oldFields;
}
@ -193,7 +200,7 @@ class RequestLogger {
this.uids = uidList || [ generateUid() ];
this.entries = [];
this.fields = {};
this.fields = new Map();
this.logLevel = logLevel;
this.dumpThreshold = dumpThreshold;
this.endLevel = endLevel;
@ -248,7 +255,7 @@ class RequestLogger {
*/
addDefaultFields(fields) {
const oldFields = this.fields;
this.fields = Object.assign({}, this.fields, fields);
copyFields(this.fields, fields);
return oldFields;
}
@ -433,24 +440,22 @@ class RequestLogger {
});
return;
}
const fields = Object.assign({}, logFields || {});
let elapsedMs = 0;
const endFlag = isEnd || false;
/*
* Even though this is added automatically by bunyan, it uses an
* expensive regex to figure out the native Date function. By adding
* timestamp here avoids that expensive call.
*/
if (fields.time === undefined) {
fields.time = new Date();
}
fields.req_id = serializeUids(this.uids);
if (endFlag) {
this.elapsedTime = process.hrtime(this.startTime);
fields.elapsed_ms = this.elapsedTime[0] * 1000
elapsedMs = this.elapsedTime[0] * 1000
+ this.elapsedTime[1] / 1000000;
}
const fields = new Map();
// Date.now() consumes less cpu ticks than new Date()
fields.set('time', Date.now());
fields.set('req_id', serializeUids(this.uids));
fields.set('elapsed_ms', elapsedMs);
this.fields.forEach((v, k) => {
fields.set(k, v);
});
copyFields(fields, logFields || {});
const logEntry = {
level,
fields,
@ -459,9 +464,10 @@ class RequestLogger {
this.entries.push(logEntry);
if (LogLevel.shouldLog(level, this.dumpThreshold)) {
this.entries.forEach((entry) => {
for (let i = 0; i < this.entries.length; i++) {
const entry = this.entries[i];
this.doLogIO(entry);
});
}
this.entries = [];
} else if (LogLevel.shouldLog(level, this.logLevel)) {
this.doLogIO(logEntry);
@ -484,30 +490,9 @@ class RequestLogger {
* @returns {undefined}
*/
doLogIO(logEntry) {
const fields = Object.assign({}, this.fields, logEntry.fields);
LogLevel.throwIfInvalid(logEntry.level);
switch (logEntry.level) {
case 'trace':
this.bLogger.trace(fields, logEntry.msg);
break;
case 'debug':
this.bLogger.debug(fields, logEntry.msg);
break;
case 'info':
this.bLogger.info(fields, logEntry.msg);
break;
case 'warn':
this.bLogger.warn(fields, logEntry.msg);
break;
case 'error':
this.bLogger.error(fields, logEntry.msg);
break;
case 'fatal':
this.bLogger.fatal(fields, logEntry.msg);
break;
default:
throw new Error(`Unexpected log level: ${logEntry.level}`);
}
this.bLogger[logEntry.level](logEntry.fields, logEntry.msg);
}
}

63
lib/SimpleLogger.js Normal file
View File

@ -0,0 +1,63 @@
'use strict';
function isWriteableStream(s) {
if (!s.stream) {
return false;
}
// duck typing to check if the object is a writeable stream
return s.stream.writable;
}
class SimpleLogger {
constructor(name, streams) {
this.name = name;
this.streams = [ { level: 'info', stream: process.stdout } ];
if (streams) {
if (!Array.isArray(streams)) {
throw new Error('Invalid streams. streams must be an array list' +
' of writeable streams');
}
this.streams = streams.filter(isWriteableStream);
}
}
log(level, fields, message) {
for (let i = 0; i < this.streams.length; i++) {
const s = this.streams[i];
const logEntry = Object.create(null);
logEntry.level = level;
logEntry.message = message;
if (fields instanceof Map) {
fields.forEach((v, k) => { logEntry[k] = v; });
}
s.stream.write(JSON.stringify(logEntry) + '\n');
}
}
info(fields, message) {
this.log('info', fields, message);
}
debug(fields, message) {
this.log('debug', fields, message);
}
trace(fields, message) {
this.log('trace', fields, message);
}
warn(fields, message) {
this.log('warn', fields, message);
}
error(fields, message) {
this.log('error', fields, message);
}
fatal(fields, message) {
this.log('fatal', fields, message);
}
}
module.exports = SimpleLogger;
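For context, a small hypothetical usage of the class above (in werelogs the wiring is done by Config.js rather than by hand): each call turns the level, the message and any Map fields into a single JSON line on every configured writable stream.

```js
'use strict';
const SimpleLogger = require('./lib/SimpleLogger.js'); // path assumed

const log = new SimpleLogger('werelogs', [
    { level: 'info', stream: process.stdout },
]);

const fields = new Map();
fields.set('name', 'werelogs');
fields.set('req_id', 'deadbeef:cafebabe');

log.info(fields, 'this is a simple message');
// => {"level":"info","message":"this is a simple message",
//     "name":"werelogs","req_id":"deadbeef:cafebabe"}
```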

View File

@ -25,7 +25,16 @@ function generateUid() {
*
*/
function serializeUids(uidList) {
return uidList.join(':');
// return uidList.join(':');
let res = '';
const uidLen = uidList.length;
for (let i = 0; i < uidLen; i++) {
res += uidList[i];
if (i !== uidLen - 1) {
res += ':';
}
}
return res;
}
/**
@ -40,8 +49,24 @@ function unserializeUids(stringdata) {
return stringdata.split(':');
}
/**
* This function copies the properties from the source object to the fields map
* @param {map} fields - Map of log entry fields
* @param {object} source - object to copy from
* @returns {map} - map object containing the copied fields
*/
function copyFields(fields, source) {
const sourceArr = Object.keys(source);
for (let i = 0; i < sourceArr.length; i++) {
const key = sourceArr[i];
fields.set(key, source[key]);
}
return fields;
}
module.exports = {
generateUid,
serializeUids,
unserializeUids,
copyFields,
};

View File

@ -29,9 +29,7 @@
"url": "https://github.com/scality/werelogs/issues"
},
"homepage": "https://github.com/scality/werelogs#readme",
"dependencies": {
"bunyan": "^1.5.1"
},
"dependencies": {},
"devDependencies": {
"eslint": "^1.10.1",
"eslint-config-airbnb": "^1.0.2",

View File

@ -19,33 +19,36 @@ class DummyLogger {
}
trace(obj, msg) {
this.ops.push(['trace', [obj, msg]]);
this.counts.trace += 1;
this._doLog('trace', obj, msg);
}
debug(obj, msg) {
this.ops.push(['debug', [obj, msg]]);
this.counts.debug += 1;
this._doLog('debug', obj, msg);
}
info(obj, msg) {
this.ops.push(['info', [obj, msg]]);
this.counts.info += 1;
this._doLog('info', obj, msg);
}
warn(obj, msg) {
this.ops.push(['warn', [obj, msg]]);
this.counts.warn += 1;
this._doLog('warn', obj, msg);
}
error(obj, msg) {
this.ops.push(['error', [obj, msg]]);
this.counts.error += 1;
this._doLog('error', obj, msg);
}
fatal(obj, msg) {
this.ops.push(['fatal', [obj, msg]]);
this.counts.fatal += 1;
this._doLog('fatal', obj, msg);
}
_doLog(level, obj, msg) {
const fields = Object.create(null);
if (obj instanceof Map) {
obj.forEach((v, k) => fields[k] = v);
}
this.ops.push([ level, [ fields, msg]]);
this.counts[level] += 1;
}
}

View File

@ -1,30 +1,47 @@
'use strict';
const assert = require('assert');
const bunyan = require('bunyan');
const PassThrough = require('stream').PassThrough;
const pass = new PassThrough;
const Logger = require('werelogs');
const logBuffer = new bunyan.RingBuffer({ limit: 1 });
const logBuffer = {
records: [],
};
pass.on('data', data => {
logBuffer.records.push(data.toString());
});
function createModuleLogger() {
return new Logger('FT-test', {
level: 'info',
dump: 'error',
streams: [ {
stream: pass,
type: 'raw',
stream: logBuffer,
} ],
});
}
function checkFields(fields) {
Object.keys(fields).forEach((k) => {
if (fields.hasOwnProperty(k)) {
assert.deepStrictEqual(logBuffer.records[0][k], fields[k]);
const record = JSON.parse(logBuffer.records[0].trim());
assert.deepStrictEqual(record[k], fields[k]);
}
});
}
function parseLogEntry() {
return JSON.parse(logBuffer.records[0].trim());
}
describe('Werelogs is usable as a dependency', () => {
describe('Usage of the ModuleLogger', () => {
afterEach(() => {
logBuffer.records = [];
});
it('Should be able to create a logger', (done) => {
assert.doesNotThrow(
createModuleLogger,
@ -38,7 +55,7 @@ describe('Werelogs is usable as a dependency', () => {
const logger = createModuleLogger();
const msg = 'This is a simple message';
logger.info(msg);
assert.strictEqual(logBuffer.records[0].msg, msg);
assert.strictEqual(parseLogEntry().message, msg);
done();
});
@ -47,13 +64,16 @@ describe('Werelogs is usable as a dependency', () => {
const msg = 'This is a message with added fields';
const fields = { errorCode: 9, description: 'TestError', options: { dump: false } };
logger.info(msg, fields);
assert.strictEqual(logBuffer.records[0].msg, msg);
assert.strictEqual(parseLogEntry().message, msg);
checkFields(fields);
done();
});
});
describe('Usage of the RequestLogger', () => {
afterEach(() => {
logBuffer.records = [];
});
it('Should be able to create a logger', (done) => {
assert.doesNotThrow(
() => {
@ -69,7 +89,7 @@ describe('Werelogs is usable as a dependency', () => {
const logger = createModuleLogger().newRequestLogger();
const msg = 'This is a simple message';
logger.info(msg);
assert.strictEqual(logBuffer.records[0].msg, msg);
assert.strictEqual(parseLogEntry().message, msg);
done();
});
@ -78,7 +98,7 @@ describe('Werelogs is usable as a dependency', () => {
const msg = 'This is a message with added fields';
const fields = { errorCode: 9, description: 'TestError', options: { dump: false } };
logger.info(msg, fields);
assert.strictEqual(logBuffer.records[0].msg, msg);
assert.strictEqual(parseLogEntry().message, msg);
checkFields(fields);
done();
});

View File

@ -1,6 +1,5 @@
const assert = require('assert');
const bunyan = require('bunyan');
const PassThrough = require('stream').PassThrough;
const Werelogs = require('werelogs');
const modules = [
@ -8,39 +7,46 @@ const modules = [
require('./module2.js'),
require('./module3.js'),
];
const pass = new PassThrough;
const logBuffer = {
records: [],
};
pass.on('data', data => {
logBuffer.records.push(JSON.parse(data.toString().trim()));
});
describe('Config is shared and unique within one program', () => {
it('should find all log entries in the RingBuffer with the right module name', (done) => {
const rBuffer = new bunyan.RingBuffer({ limit: 15 });
const log = new Werelogs('test-index', {
level: 'debug',
dump: 'fatal',
streams: [{
type: 'raw',
stream: rBuffer,
stream: pass,
}],
});
modules.forEach((mod) => { mod(); });
log.warn('Logging as warn');
const rLog = log.newRequestLogger();
rLog.info('Logging request as info');
assert.deepStrictEqual(rBuffer.records.length, 5, 'Expected to see 5 log entries in the ring buffer.');
assert.deepStrictEqual(rBuffer.records[0].msg, 'Logging as info');
assert.deepStrictEqual(rBuffer.records[0].name, 'test-mod1');
assert.deepStrictEqual(rBuffer.records[0].level, 30);
assert.deepStrictEqual(rBuffer.records[1].msg, 'Logging as debug');
assert.deepStrictEqual(rBuffer.records[1].name, 'test-mod2');
assert.deepStrictEqual(rBuffer.records[1].level, 20);
assert.deepStrictEqual(rBuffer.records[2].msg, 'Logging as error');
assert.deepStrictEqual(rBuffer.records[2].name, 'test-mod3');
assert.deepStrictEqual(rBuffer.records[2].level, 50);
assert.deepStrictEqual(rBuffer.records[3].msg, 'Logging as warn');
assert.deepStrictEqual(rBuffer.records[3].name, 'test-index');
assert.deepStrictEqual(rBuffer.records[3].level, 40);
assert.deepStrictEqual(rBuffer.records[4].msg, 'Logging request as info');
assert.deepStrictEqual(rBuffer.records[4].name, 'test-index');
assert.deepStrictEqual(rBuffer.records[4].level, 30);
assert.notStrictEqual(rBuffer.records[4].req_id, undefined);
assert.deepStrictEqual(logBuffer.records.length, 5, 'Expected to see 5 log entries in the ring buffer.');
assert.deepStrictEqual(logBuffer.records[0].message, 'Logging as info');
assert.deepStrictEqual(logBuffer.records[0].name, 'test-mod1');
assert.deepStrictEqual(logBuffer.records[0].level, 'info');
assert.deepStrictEqual(logBuffer.records[1].message, 'Logging as debug');
assert.deepStrictEqual(logBuffer.records[1].name, 'test-mod2');
assert.deepStrictEqual(logBuffer.records[1].level, 'debug');
assert.deepStrictEqual(logBuffer.records[2].message, 'Logging as error');
assert.deepStrictEqual(logBuffer.records[2].name, 'test-mod3');
assert.deepStrictEqual(logBuffer.records[2].level, 'error');
assert.deepStrictEqual(logBuffer.records[3].message, 'Logging as warn');
assert.deepStrictEqual(logBuffer.records[3].name, 'test-index');
assert.deepStrictEqual(logBuffer.records[3].level, 'warn');
assert.deepStrictEqual(logBuffer.records[4].message, 'Logging request as info');
assert.deepStrictEqual(logBuffer.records[4].name, 'test-index');
assert.deepStrictEqual(logBuffer.records[4].level, 'info');
assert.notStrictEqual(logBuffer.records[4].req_id, undefined);
done();
});
});

View File

@ -1,8 +1,6 @@
const assert = require('assert');
const bunyan = require('bunyan');
const Config = require('../../lib/Config.js');
const logBuffer = new bunyan.RingBuffer({ limit: 1 });
describe('Config', () => {
beforeEach(() => {
@ -40,15 +38,6 @@ describe('Config', () => {
done();
});
it('streams should be updateable', (done) => {
const origStreams = Config.streams;
const origLogger = Config.logger;
Config.update({ streams: [{ type: 'raw', stream: logBuffer }] });
assert.notStrictEqual(origStreams, Config.streams, 'Expected Config\'s streams to have been updated.');
assert.notStrictEqual(origLogger, Config.Logger, 'Expected Config\'s logger to have been replaced by update.');
done();
});
it('should not be modified by an empty config object', (done) => {
const origLevel = Config.level;
const origDump = Config.dump;

View File

@ -27,7 +27,7 @@ function filterGenerator(logLevel, callLevel) {
* Here, patch the Config by setting a specifically designed dummyLogger
* for testing purposes that will help us collect runtime data.
*/
Config.bLogger = dummyLogger;
Config.simpleLogger = dummyLogger;
return logger;
}
@ -174,7 +174,7 @@ describe('WereLogs Logger is usable:', () => {
it('Uses the additional fields as expected', (done) => {
const dummyLogger = new DummyLogger();
const logger = new Logger('test');
Config.bLogger = dummyLogger;
Config.simpleLogger = dummyLogger;
const fields = {
ip: '127.0.0.1',
method: 'GET',
@ -195,7 +195,7 @@ describe('WereLogs Logger is usable:', () => {
];
function createMisusableLogger(dummyLogger) {
const logger = new Logger('test');
Config.bLogger = dummyLogger;
Config.simpleLogger = dummyLogger;
return logger;
}

View File

@ -5,6 +5,7 @@ const Utils = require('../../lib/Utils.js');
const generateUid = Utils.generateUid;
const serializeUids = Utils.serializeUids;
const unserializeUids = Utils.unserializeUids;
const copyFields = Utils.copyFields;
describe('Utils: generateUid', () => {
it('generates a string-typed ID', (done) => {
@ -42,3 +43,19 @@ describe('Utils: serializeUids', () => {
done();
});
});
describe('Utils: copyFields', () => {
it('copies all the properties from source to target object', (done) => {
const target = new Map();
target.set('foo', 'bar');
const source = { id: 1, name: 'demo', value: { a: 1, b: 2, c: 3 } };
const result = new Map();
result.set('foo', 'bar');
result.set('id', 1);
result.set('name', 'demo');
result.set('value', { a: 1, b: 2, c: 3 });
copyFields(target, source);
assert.deepStrictEqual(target, result, 'target should have the same properties as source');
done();
});
});