Compare commits

..

No commits in common. "c75435ec8cc1cce018fc3b42f4680276acfbab87" and "ac016eb00f06f084b59c59a387fc8874c50b81c5" have entirely different histories.

6 changed files with 68 additions and 942 deletions

View File

@ -1,247 +0,0 @@
'use strict'; // eslint-disable-line strict

const Utils = require('./Utils.js');
const objectCopy = Utils.objectCopy;

/**
 * This class manages the internal `default fields` for logging classes, with
 * a notion of parent/child relationships and hierarchy.
 * A child node inherits the fields from its parent node, and completes (or
 * even overrides part of) them with its own fields. For instance, a child can
 * redefine (and thus override in the final result) a field that is already
 * defined by its parent.
 *
 * This class shall be used embedded within loggers, to provide the `default
 * fields` and hierarchy logic.
 *
 * @private @property {DefaultFields} parent - The parent node in the
 *                                             hierarchy (null for a root
 *                                             node)
 * @private @property {DefaultFields[]} children - The list of children nodes
 *                                                 in the hierarchy
 * @private @property {Object} fields - The dictionary of fields defined for
 *                                      this node
 * @private @property {Object} parentFields - The dictionary of fields
 *                                            inherited by this node from its
 *                                            parent node
 * @private @property {Object} precomputedFields - The cached aggregation of
 *                                                 the parent's fields
 *                                                 overridden with the node's
 *                                                 own fields. Kept up to
 *                                                 date eagerly on every
 *                                                 change so that reads
 *                                                 (_dfGetFields) are cheap.
 *
 * @mixin
 */
class DefaultFields {
    constructor() {
        this.parent = null;
        this.children = [];
        this.fields = {};
        this.parentFields = {};
        this.precomputedFields = {};
    }

    /**
     * This function allows to manually destroy a node, so that all
     * references to parents or children are released. This helps in avoiding
     * memory leaks: parent and child nodes reference each other (a cycle
     * kept reachable from the hierarchy root), so without an explicit
     * teardown the GC cannot reclaim abandoned child loggers.
     *
     * @returns {undefined}
     */
    _dfDestroy() {
        // Note: child.setParent(null) removes the child from this.children
        // via _dfUnregisterChild, which rebinds this.children to a *new*
        // filtered array; iterating over the array captured by forEach is
        // therefore safe.
        this.children.forEach(child => {
            child.setParent(null);
        });
        this.setParent(null);
        this.children = [];
    }

    /**
     * Add a node to the list of children of the current node, effectively
     * making it one of the node's children, and immediately push the current
     * precomputed fields down to it.
     *
     * @param {DefaultFields} child - The node to add as a child of the
     *                                current node
     *
     * @return {undefined}
     */
    _dfRegisterChild(child) {
        this.children.push(child);
        this._dfNotifyChild(child);
    }

    /**
     * Remove a node from the list of children of the current node,
     * effectively cutting off the relationship between the two.
     *
     * Note: this only updates the parent's side of the link; callers go
     * through setParent(), which maintains both ends.
     *
     * @param {DefaultFields} toRemove - The node to remove from the list of
     *                                   children of the current node.
     *
     * @return {undefined}
     */
    _dfUnregisterChild(toRemove) {
        this.children = this.children.filter(child => child !== toRemove);
    }

    /**
     * Utility function to notify one child node of an update of the node's
     * precomputed fields.
     *
     * @param {DefaultFields} child - The child node to notify
     *
     * @return {undefined}
     */
    _dfNotifyChild(child) {
        child._dfSetParentFields(this.precomputedFields);
    }

    /**
     * Utility function to notify every child node of an update of the
     * node's precomputed fields, cascading the update down the hierarchy.
     *
     * @return {undefined}
     */
    _dfNotifyChildren() {
        this.children.forEach(child => this._dfNotifyChild(child));
    }

    /**
     * This function allows changing the parent node of the current node,
     * consequently changing the resulting aggregation of the hierarchy of
     * fields. This can be used for a temporary switch of parent node.
     *
     * @param {DefaultFields|null} parent - The new parent node to set for
     *                                      the current node (null or
     *                                      undefined to detach)
     *
     * @return {DefaultFields|null} The previous parent
     */
    setParent(parent) {
        const oldParent = this.parent;
        // No-op when the parent does not actually change
        if (parent === oldParent) {
            return oldParent;
        }
        if (oldParent) {
            oldParent._dfUnregisterChild(this);
        }
        this.parent = parent || null;
        if (this.parent) {
            // Registering also pushes the new parent's fields down to us
            this.parent._dfRegisterChild(this);
        } else {
            // Detached: recompute with an empty set of parent fields
            this._dfSetParentFields();
        }
        return oldParent;
    }

    /**
     * Internal function to partially recompute the precomputedFields by
     * merging this node's own fields over the already-copied parent fields.
     * Own fields thus override conflicting parent fields.
     *
     * This function shall always be the last one called when updating the
     * internal fields, as it also triggers the update of the children nodes.
     *
     * NOTE(review): despite what the call in addDefaultFields() suggests,
     * this method takes no parameter; it always re-applies the whole of
     * this.fields, and any argument passed is ignored.
     *
     * @return {undefined}
     */
    _dfAugmentPrecomputedFields() {
        objectCopy(this.precomputedFields, this.fields);
        this._dfNotifyChildren();
    }

    /**
     * Internal function to update the fields provided by the parent node in
     * the DefaultFields hierarchy. It serves as a notification hook to
     * refresh the precomputed fields depending on the parent node's fields.
     * Two situations may lead to calling this function:
     * 1. The parent node updated its precomputed fields and notified its
     *    children, including the current node
     * 2. The node reset its own fields, and we must re-compute to remove
     *    obsolete fields previously provided by the current node.
     *
     * @param {Object} [parentFields] - the precomputedFields from the parent
     *                                  node (defaults to an empty dict when
     *                                  omitted, i.e. detached node)
     *
     * @return {undefined}
     */
    _dfSetParentFields(parentFields) {
        this.parentFields = parentFields || {};
        // Rebuild from scratch: parent fields first, own fields on top
        this.precomputedFields = objectCopy({}, this.parentFields);
        this._dfAugmentPrecomputedFields();
    }

    /**
     * This function allows to reset the fields managed by the DefaultFields
     * instance. It automatically triggers the re-computation of the
     * precomputed fields, cascading through the node and its children.
     *
     * /!\ This function may lead to an important use of the computational
     * resources if over-used.
     *
     * @return {Object} The previous set of default fields (can be safely
     *                  ignored)
     */
    resetDefaultFields() {
        const oldFields = this.fields;
        this.fields = {};
        // Automatically triggers the recomputation of precomputedFields
        this._dfSetParentFields(this.parentFields);
        return oldFields;
    }

    /**
     * This function allows the user to remove one or more items from the
     * defaultFields's dict.
     *
     * Removal is implemented by re-adding the keys with an undefined value,
     * relying on objectCopy's merge semantics to mask them in the
     * precomputed result.
     *
     * @param {String[]} fields - List of the names of the fields to be
     *                            removed from the internal dictionary of
     *                            default fields
     *
     * @return {Object} The previous set of default fields
     */
    removeDefaultFields(fields) {
        const toRemove = {};
        fields.forEach(key => {
            toRemove[key] = undefined;
        });
        return this.addDefaultFields(toRemove);
    }

    /**
     * This function allows the user to add default fields to include into
     * all JSON log entries generated through this request logger. As this
     * function attempts not to modify the provided fields object, it copies
     * the fields into a new object for safe keeping.
     *
     * @param {Object} fields The dictionary of additional fields to include
     *                        by default for this instance of the
     *                        RequestLogger.
     *
     * @return {Object} The previous set of default fields (can be safely
     *                  ignored).
     */
    addDefaultFields(fields) {
        const oldFields = this.fields;
        this.fields = objectCopy({}, this.fields, fields);
        // NB: the argument is ignored by _dfAugmentPrecomputedFields, which
        // re-applies the whole of this.fields instead.
        this._dfAugmentPrecomputedFields(fields);
        return oldFields;
    }

    /**
     * This function returns the node's precomputed fields, that includes all
     * of its hierarchy's fields and its own. This is intended to retrieve
     * the final form of the fields managed by the object.
     *
     * @return {Object} The precomputed fields to be added to a log entry
     */
    _dfGetFields() {
        return this.precomputedFields;
    }
}

module.exports = DefaultFields;

View File

@ -2,26 +2,17 @@
const LogLevel = require('./LogLevel.js');
const RequestLogger = require('./RequestLogger.js');
const Utils = require('./Utils.js');
const unserializeUids = require('./Utils.js').unserializeUids;
const Config = require('./Config.js');
const DefaultFields = require('./DefaultFields.js');
const unserializeUids = Utils.unserializeUids;
const objectCopy = Utils.objectCopy;
class Logger extends DefaultFields {
class Logger {
/**
* This is the constructor of the Logger class. It takes optional
* configuration parameters, that allow to modify its behavior.
*
* @param {string|Object} fields - Deprecated: {string}: The name of the
* Logger that will be included in the log
* entries
* Advised: {Object} A set of fields that
* will be used as the default fields for
* this Logger, and will be included in
* all later log entries.
* @param {string} name - The name of the Logger. It can be found later on
* in the log entries.
*
* @param {object} config - A configuration object for werelogs.
* @param {string} config.level - The string name of the logging level
@ -32,24 +23,21 @@ class Logger extends DefaultFields {
* ('trace', 'debug', 'info', 'warn',
* 'error' and 'fatal' in order of
* importance.)
* @param {object[]} config.streams - The array of streams into which to
* log. Their configuration is directly
* related to the expected bunyan
* streams array, for compatibility
* purposes (except that the 'level'
* field is not accounted for)
*
* @see [Bunyan's documentation]{@link
* https://github.com/trentm/node-bunyan/blob/master/README.md#streams} for
* a more detailed description of the streams array configuration.
*
* @returns {undefined}
*/
constructor(fields, config) {
super();
/* TODO XXX FIXME Remove starting at version 8.0 FIXME XXX TODO
* vvvvvvvvvvvvvvvvvvvvvvvv */
if (typeof fields === 'string') {
this.addDefaultFields({ name: fields });
} else if (typeof fields === 'object') {
/* ^^^^^^^^^^^^^^^^^^^^^^^
* TODO XXX FIXME Remove starting at version 8.0 FIXME XXX TODO */
this.addDefaultFields(fields);
/* TODO XXX FIXME Remove starting at version 8.0 FIXME XXX TODO
* vvvvvvvvvvvvvvvvvvvvvvvv */
}
/* ^^^^^^^^^^^^^^^^^^^^^^^
* TODO XXX FIXME Remove starting at version 8.0 FIXME XXX TODO */
constructor(name, config) {
this.name = name;
Config.update(config);
}
@ -73,7 +61,7 @@ class Logger extends DefaultFields {
const rLog = new RequestLogger(Config.logger,
Config.level, Config.dump, Config.end,
uids);
rLog.setParent(this);
rLog.addDefaultFields({ name: this.name });
return rLog;
}
@ -89,12 +77,13 @@ class Logger extends DefaultFields {
const rLog = new RequestLogger(Config.logger,
Config.level, Config.dump, Config.end,
unserializeUids(serializedUids));
rLog.setParent(this);
rLog.addDefaultFields({ name: this.name });
return rLog;
}
_doLog(levelName, msg, data) {
const sLogger = Config.logger;
const finalData = { name: this.name, time: Date.now() };
if (!LogLevel.shouldLog(levelName, Config.level)) {
return;
}
@ -107,9 +96,6 @@ class Logger extends DefaultFields {
+ ' This development error should be fixed ASAP.');
return;
}
const finalData = objectCopy({},
this._dfGetFields(),
{ time: Date.now() });
if (data) {
Object.keys(data).forEach(k => {
finalData[k] = data[k];

View File

@ -4,7 +4,6 @@ const assert = require('assert');
const LogLevel = require('./LogLevel.js');
const Utils = require('./Utils.js');
const DefaultFields = require('./DefaultFields.js');
const serializeUids = Utils.serializeUids;
const generateUid = Utils.generateUid;
const objectCopy = Utils.objectCopy;
@ -17,38 +16,39 @@ function ensureUidValidity(uid) {
return uid;
}
/*
* @mixes DefaultFields
*/
class EndLogger extends DefaultFields {
class EndLogger {
constructor(reqLogger) {
super();
this.logger = reqLogger;
this.setParent(reqLogger);
this.fields = {};
}
augmentedLog(level, msg, data) {
assert.strictEqual(this.logger.elapsedTime, null, 'The logger\'s'
+ 'end() wrapper should not be called more than'
+ ' once.');
const fields = objectCopy(this.precomputedFields, data || {});
const res = this.logger.log(level, msg, fields, true);
/* XXX NOTE
*
* This is our "manual" memory management.
* Since child and parent `DefaultFields` are actually refering to each
* other, the GC cannot release the child logger.
* Also, we have no means to automatically unregister the child from
* its parent, nor to have the parent notify the child of its update
* without keeping those references. So here we go, manual memory
* management in Garbage-collected languages !
*
* All hail << The Coming of the Great White Handkerchief >> that will
* wipe this misery out of our lives !
*
* XXX NOTE */
this._dfDestroy();
return res;
// We can alter current instance, as it won't be usable after this
// call.
this.fields = objectCopy(this.fields, data || {});
return this.logger.log(level, msg, this.fields, true);
}
/**
* This function allows the user to add default fields to include into all
* JSON log entries generated through this request logger. As this function
* attempt not to modify the provided fields object, it copies the field
* into a new object for safe keeping.
*
* @param {object} fields The dictionnary of additional fields to include
* by default for this instance of the
* RequestLogger.
*
* @returns {object} The previous set of default fields (can be
* safely ignored).
*/
addDefaultFields(fields) {
const oldFields = this.fields;
this.fields = objectCopy({}, this.fields, fields);
return oldFields;
}
/**
@ -142,7 +142,7 @@ class EndLogger extends DefaultFields {
* the global log level; and is used to track the log events for one given
* request.
*/
class RequestLogger extends DefaultFields {
class RequestLogger {
/**
* Constructor of the WereLogs Request Logger.
@ -179,7 +179,6 @@ class RequestLogger extends DefaultFields {
* @returns {undefined}
*/
constructor(logger, logLevel, dumpThreshold, endLevel, uids) {
super();
let uidList = undefined;
if (!LogLevel.shouldLog(dumpThreshold, logLevel)) {
@ -196,6 +195,7 @@ class RequestLogger extends DefaultFields {
this.uids = uidList || [generateUid()];
this.entries = [];
this.fields = {};
this.logLevel = logLevel;
this.dumpThreshold = dumpThreshold;
this.endLevel = endLevel;
@ -235,6 +235,25 @@ class RequestLogger extends DefaultFields {
return serializeUids(this.uids);
}
/**
* This function allows the user to add default fields to include into all
* JSON log entries generated through this request logger. As this function
* attempt not to modify the provided fields object, it copies the field
* into a new object for safe keeping.
*
* @param {object} fields The dictionnary of additional fields to include
* by default for this instance of the
* RequestLogger.
*
* @returns {object} The previous set of default fields (can be
* safely ignored).
*/
addDefaultFields(fields) {
const oldFields = this.fields;
this.fields = objectCopy({}, this.fields, fields);
return oldFields;
}
/**
* Logging function to write a trace-level log entry.
*
@ -416,7 +435,7 @@ class RequestLogger extends DefaultFields {
});
return;
}
const fields = objectCopy({}, this._dfGetFields(), logFields || {});
const fields = objectCopy({}, this.fields, logFields || {});
const endFlag = isEnd || false;
/*
@ -450,10 +469,6 @@ class RequestLogger extends DefaultFields {
} else if (LogLevel.shouldLog(level, this.logLevel)) {
this.doLogIO(logEntry);
}
if (isEnd) {
this.setParent(null);
}
}
/**

View File

@ -15,9 +15,8 @@ pass.on('data', data => {
logBuffer.records.push(data.toString());
});
function createModuleLogger(fields) {
const defaultFields = fields || 'FT-test';
return new Logger(defaultFields, {
function createModuleLogger() {
return new Logger('FT-test', {
level: 'info',
dump: 'error',
streams: [{
@ -106,35 +105,5 @@ describe('Werelogs is usable as a dependency', () => {
checkFields(fields);
done();
});
it('Should not log a removed field', done => {
const logger = createModuleLogger().newRequestLogger();
const msg = 'This is a message with no fields(removed)';
const fields = { errorCode: 0, description: 'TestNotFailing' };
logger.addDefaultFields(fields);
logger.removeDefaultFields(['errorCode', 'description']);
logger.info(msg);
assert.strictEqual(parseLogEntry().message, msg);
assert(!parseLogEntry().hasOwnProperty('errorCode'));
assert(!parseLogEntry().hasOwnProperty('description'));
done();
});
it('Should include the parent Loggers default fields', done => {
const mFields = {
name: 'TestModule',
submodule: 'functional',
};
const logger = createModuleLogger(mFields);
const rLog = logger.newRequestLogger();
const msg =
"This is a message including the module's default fields";
rLog.info(msg);
assert.strictEqual(parseLogEntry().message, msg);
assert.deepStrictEqual(parseLogEntry().name, mFields.name);
assert.deepStrictEqual(parseLogEntry().submodule,
mFields.submodule);
done();
});
});
});

View File

@ -1,515 +0,0 @@
'use strict'; // eslint-disable-line strict
const assert = require('assert');
const DefaultFields = require('../../lib/DefaultFields.js');
// Unit tests for the DefaultFields mixin: own-fields management,
// parent/child linking, field inheritance/overriding across a hierarchy,
// node destruction, and re-parenting (topology changes).
describe('class DefaultFields', () => {
    // Own-fields API on a standalone node (no parent, no children)
    describe('Basic Fields logic', () => {
        it('Can get the resulting fields', done => {
            const df = new DefaultFields();
            const fields = df._dfGetFields();
            assert(fields !== null && typeof fields === 'object');
            done();
        });
        it('No fields are set by default', done => {
            const df = new DefaultFields();
            assert.deepStrictEqual(df._dfGetFields(), {});
            done();
        });
        it('Validate defensive coding prevents undefined parentFields',
            done => {
                const df = new DefaultFields();
                // Called with no argument, as setParent(null) does internally
                df._dfSetParentFields();
                assert.deepStrictEqual(df._dfGetFields(), {});
                done();
            });
        describe('With fields', () => {
            let df = null;
            const fields = {
                name: 'Testing',
                testing: true,
                count: 4,
                data: 'doggy',
            };
            beforeEach(done => {
                df = new DefaultFields();
                df.addDefaultFields(fields);
                done();
            });
            it('Can add a dictionary of new fields', done => {
                assert.deepStrictEqual(df._dfGetFields(), fields);
                done();
            });
            it('Can remove a list of invalid fields', done => {
                // Removing keys that were never set must not throw
                df.removeDefaultFields(['invalid', 'notthere']);
                done();
            });
            it('Can remove a list of fields', done => {
                df.removeDefaultFields(['data', 'count']);
                assert.strictEqual(df._dfGetFields().data, undefined);
                assert.strictEqual(df._dfGetFields().count, undefined);
                assert.strictEqual(df._dfGetFields().name, 'Testing');
                assert.strictEqual(df._dfGetFields().testing, true);
                done();
            });
            it('Can reset the fields', done => {
                df.resetDefaultFields();
                assert.deepStrictEqual(df._dfGetFields(), {});
                done();
            });
        });
    });

    // Linking/unlinking a single parent-child pair, without fields involved
    describe('Basic Parent-Child logic', () => {
        let parentNode = null;
        beforeEach(done => {
            parentNode = new DefaultFields();
            done();
        });
        // Helper: create a fresh node already attached to pNode
        function linkedChild(pNode) {
            const childNode = new DefaultFields();
            childNode.setParent(pNode);
            return childNode;
        }
        it('Can set a parent to a node', done => {
            const childNode = linkedChild(parentNode);
            assert.strictEqual(childNode.parent, parentNode);
            assert.notStrictEqual(parentNode.children.indexOf(childNode), -1);
            done();
        });
        it('Can remove a child from a node', done => {
            const childNode = linkedChild(parentNode);
            parentNode._dfUnregisterChild(childNode);
            assert.strictEqual(parentNode.children.indexOf(childNode), -1);
            done();
        });
        it('Can reset the parent of a node (w/ undefined)', done => {
            const childNode = linkedChild(parentNode);
            childNode.setParent(undefined);
            assert.strictEqual(childNode.parent, null);
            assert.strictEqual(parentNode.children.indexOf(childNode), -1);
            done();
        });
        it('Can reset the parent of a node (w/ null)', done => {
            const childNode = linkedChild(parentNode);
            childNode.setParent(null);
            assert.strictEqual(childNode.parent, null);
            assert.strictEqual(parentNode.children.indexOf(childNode), -1);
            done();
        });
    });

    // Field inheritance and overriding between one parent and one child
    describe('Single parent-child relationship', () => {
        let parentNode = null;
        let childNode = null;
        beforeEach(done => {
            parentNode = new DefaultFields();
            childNode = new DefaultFields();
            childNode.setParent(parentNode);
            done();
        });
        it('Child can define its own fields', done => {
            const fields = {
                child: true,
                parent: false,
                test: 1,
            };
            childNode.addDefaultFields(fields);
            assert.deepStrictEqual(childNode._dfGetFields(), fields);
            done();
        });
        it('Parent can define its own fields', done => {
            const fields = {
                child: false,
                parent: true,
                test: 2,
            };
            parentNode.addDefaultFields(fields);
            assert.deepStrictEqual(parentNode._dfGetFields(), fields);
            done();
        });
        it('Child inherits parents fields', done => {
            const fields = {
                child: true,
                parent: false,
                test: 3,
            };
            parentNode.addDefaultFields(fields);
            assert.deepStrictEqual(childNode._dfGetFields(), fields);
            done();
        });
        it('Child inherits successive parent field updates', done => {
            const pFields1 = {
                parent: true,
                test: 4,
            };
            const rFields1 = {
                parent: true,
                test: 4,
            };
            const pFields2 = { child: false };
            const rFields2 = {
                parent: true,
                test: 4,
                child: false,
            };
            const pFields3 = {
                data: 'pouet',
            };
            const rFields3 = {
                parent: true,
                test: 4,
                child: false,
                data: 'pouet',
            };
            // Each parent update must cascade into the child's view
            parentNode.addDefaultFields(pFields1);
            assert.deepStrictEqual(childNode._dfGetFields(), rFields1);
            parentNode.addDefaultFields(pFields2);
            assert.deepStrictEqual(childNode._dfGetFields(), rFields2);
            parentNode.addDefaultFields(pFields3);
            assert.deepStrictEqual(childNode._dfGetFields(), rFields3);
            done();
        });
        it('Child inherits reset parent fields', done => {
            const pFields = {
                parent: true,
                test: 5,
            };
            parentNode.addDefaultFields(pFields);
            assert.deepStrictEqual(childNode._dfGetFields(), pFields);
            parentNode.resetDefaultFields();
            assert.deepStrictEqual(childNode._dfGetFields(), {});
            done();
        });
        it('Child mixes parent and own fields', done => {
            const pFields = { parent: true };
            const cFields = {
                child: true,
                test: 6,
            };
            const rFields = {
                parent: true,
                child: true,
                test: 6,
            };
            parentNode.addDefaultFields(pFields);
            childNode.addDefaultFields(cFields);
            assert.deepStrictEqual(childNode._dfGetFields(), rFields);
            done();
        });
        it('Child overrides conflicting parent fields', done => {
            const pFields = {
                parent: true,
                child: false,
                test: 0,
            };
            const cFields = {
                child: true,
                test: 7,
            };
            const rFields = {
                parent: true,
                child: true,
                test: 7,
            };
            parentNode.addDefaultFields(pFields);
            childNode.addDefaultFields(cFields);
            assert.deepStrictEqual(childNode._dfGetFields(), rFields);
            done();
        });
    });

    // Four-generation chain: great-grandparent -> grandparent -> parent ->
    // child; checks cascading updates, resets and destruction
    describe('Multiple-level parent-child relationship', () => {
        let ggpNode = null;
        let gpNode = null;
        let pNode = null;
        let childNode = null;
        beforeEach(done => {
            ggpNode = new DefaultFields();
            gpNode = new DefaultFields();
            gpNode.setParent(ggpNode);
            pNode = new DefaultFields();
            pNode.setParent(gpNode);
            childNode = new DefaultFields();
            childNode.setParent(pNode);
            done();
        });
        it('Child inherits from whole hierarchy (newer -> older)',
            done => {
                const ggpFields = { great: true };
                const gpFields = { grand: true };
                const pFields = { parent: true };
                const rFields = {
                    great: true,
                    grand: true,
                    parent: true,
                };
                // Fields added from the bottom of the hierarchy upwards
                pNode.addDefaultFields(pFields);
                gpNode.addDefaultFields(gpFields);
                ggpNode.addDefaultFields(ggpFields);
                assert.deepStrictEqual(childNode._dfGetFields(), rFields);
                done();
            });
        it('Child inherits from whole hierarchy (older -> newer)',
            done => {
                const ggpFields = { great: true };
                const gpFields = { grand: true };
                const pFields = { parent: true };
                const rFields = {
                    great: true,
                    grand: true,
                    parent: true,
                };
                // Fields added from the top of the hierarchy downwards
                ggpNode.addDefaultFields(ggpFields);
                gpNode.addDefaultFields(gpFields);
                pNode.addDefaultFields(pFields);
                assert.deepStrictEqual(childNode._dfGetFields(), rFields);
                done();
            });
        it('Nodes inherit in-hierarchy fields reset', done => {
            const ggpFields = { great: true };
            const gpFields = { grand: true };
            const pFields = { parent: true };
            const rFields = {
                great: true,
                // grand: true, // Part 'reset'
                parent: true,
            };
            ggpNode.addDefaultFields(ggpFields);
            gpNode.addDefaultFields(gpFields);
            pNode.addDefaultFields(pFields);
            // Resetting a middle node must drop only that node's fields
            gpNode.resetDefaultFields();
            assert.deepStrictEqual(childNode._dfGetFields(), rFields);
            done();
        });
        it('Field overriding is cascading through generations (newer -> older)',
            done => {
                const ggpFields = { generation: 0 };
                const gpFields = { generation: 1 };
                const pFields = { generation: 2 };
                const cFields = { generation: 3 };
                childNode.addDefaultFields(cFields);
                pNode.addDefaultFields(pFields);
                gpNode.addDefaultFields(gpFields);
                ggpNode.addDefaultFields(ggpFields);
                // Each node's own value wins over the inherited one
                assert.deepStrictEqual(childNode._dfGetFields(), cFields);
                assert.deepStrictEqual(pNode._dfGetFields(), pFields);
                assert.deepStrictEqual(gpNode._dfGetFields(), gpFields);
                assert.deepStrictEqual(ggpNode._dfGetFields(), ggpFields);
                done();
            });
        it('Field overriding is cascading through generations (older -> newer)',
            done => {
                const ggpFields = { generation: 0 };
                const gpFields = { generation: 1 };
                const pFields = { generation: 2 };
                const cFields = { generation: 3 };
                ggpNode.addDefaultFields(ggpFields);
                gpNode.addDefaultFields(gpFields);
                pNode.addDefaultFields(pFields);
                childNode.addDefaultFields(cFields);
                assert.deepStrictEqual(childNode._dfGetFields(), cFields);
                assert.deepStrictEqual(pNode._dfGetFields(), pFields);
                assert.deepStrictEqual(gpNode._dfGetFields(), gpFields);
                assert.deepStrictEqual(ggpNode._dfGetFields(), ggpFields);
                done();
            });
        it('Destroying intermediate level breaks relationships', done => {
            const ggpFields = { ggp: 1 };
            const gpFields = { gp: 1 };
            const pFields = { p: 1 };
            const cFields = { c: 1 };
            ggpNode.addDefaultFields(ggpFields);
            gpNode.addDefaultFields(gpFields);
            pNode.addDefaultFields(pFields);
            childNode.addDefaultFields(cFields);
            // Destroying pNode detaches it both from its parent and children
            pNode._dfDestroy();
            assert.strictEqual(gpNode.children.indexOf(pNode), -1);
            assert.strictEqual(pNode.parent, null);
            assert.strictEqual(pNode.children.indexOf(childNode), -1);
            assert.strictEqual(childNode.parent, null);
            assert.deepStrictEqual(pNode._dfGetFields(), pFields);
            assert.deepStrictEqual(childNode._dfGetFields(), cFields);
            done();
        });
        it('Destroying intermediate level(2) breaks relationships', done => {
            const ggpFields = { ggp: 1 };
            const gpFields = { gp: 1 };
            const pFields = { p: 1 };
            const cFields = { c: 1 };
            // childNode keeps the fields inherited from pNode, which is
            // still its parent after gpNode's destruction
            const rCFields = { p: 1, c: 1 };
            ggpNode.addDefaultFields(ggpFields);
            gpNode.addDefaultFields(gpFields);
            pNode.addDefaultFields(pFields);
            childNode.addDefaultFields(cFields);
            gpNode._dfDestroy();
            assert.strictEqual(ggpNode.children.indexOf(gpNode), -1);
            assert.strictEqual(gpNode.parent, null);
            assert.strictEqual(gpNode.children.indexOf(pNode), -1);
            assert.strictEqual(pNode.parent, null);
            assert.deepStrictEqual(gpNode._dfGetFields(), gpFields);
            assert.deepStrictEqual(childNode._dfGetFields(), rCFields);
            done();
        });
    });

    // Re-parenting nodes between two pre-built 3-level hierarchies
    describe('Topology changes', () => {
        let ggpNode1 = null;
        let ggpNode2 = null;
        let gpNode1 = null;
        let gpNode2 = null;
        let pNode1 = null;
        let pNode2 = null;
        let cNode = null;
        const ggp1Fields = { ggp1: true, generation: 0 };
        const ggp2Fields = { ggp2: true, generation: 0 };
        const gp1Fields = { gp1: true, generation: 1 };
        const gp2Fields = { gp2: true, generation: 1 };
        const p1Fields = { p1: true, generation: 2 };
        const p2Fields = { p2: true, generation: 2 };
        const cFields = { c: true, generation: 3 };
        // cNode starts attached under hierarchy 1 (ggp1 -> gp1 -> p1)
        const startFields = {
            ggp1: true,
            gp1: true,
            p1: true,
            c: true,
            generation: 3,
        };
        beforeEach(done => {
            ggpNode1 = new DefaultFields();
            gpNode1 = new DefaultFields();
            gpNode1.setParent(ggpNode1);
            pNode1 = new DefaultFields();
            pNode1.setParent(gpNode1);
            ggpNode2 = new DefaultFields();
            gpNode2 = new DefaultFields();
            gpNode2.setParent(ggpNode2);
            pNode2 = new DefaultFields();
            pNode2.setParent(gpNode2);
            cNode = new DefaultFields();
            cNode.setParent(pNode1);
            ggpNode1.addDefaultFields(ggp1Fields);
            ggpNode2.addDefaultFields(ggp2Fields);
            gpNode1.addDefaultFields(gp1Fields);
            gpNode2.addDefaultFields(gp2Fields);
            pNode1.addDefaultFields(p1Fields);
            pNode2.addDefaultFields(p2Fields);
            cNode.addDefaultFields(cFields);
            done();
        });
        it('Nodes are updated accordingly when a parent changes '
            + '(change whole upper hierarchy)',
            done => {
                const rFields = {
                    ggp2: true,
                    gp2: true,
                    p2: true,
                    c: true,
                    generation: 3,
                };
                assert.deepStrictEqual(cNode._dfGetFields(), startFields);
                cNode.setParent(pNode2);
                assert.deepStrictEqual(cNode._dfGetFields(), rFields);
                done();
            });
        it('Nodes are updated accordingly when a parent changes '
            + '(change part of the upper hierarchy)',
            done => {
                const rFields = {
                    ggp2: true,
                    gp2: true,
                    p1: true,
                    c: true,
                    generation: 3,
                };
                assert.deepStrictEqual(cNode._dfGetFields(), startFields);
                pNode1.setParent(gpNode2);
                assert.deepStrictEqual(cNode._dfGetFields(), rFields);
                done();
            });
        it('Nodes are updated accordingly when a parent changes '
            + '(shortcut hierarchy)',
            done => {
                const rFields = {
                    ggp2: true,
                    c: true,
                    generation: 3,
                };
                assert.deepStrictEqual(cNode._dfGetFields(), startFields);
                cNode.setParent(ggpNode2);
                assert.deepStrictEqual(cNode._dfGetFields(), rFields);
                done();
            });
        it('Nodes are updated accordingly when a parent is unset '
            + '(remove whole hierarchy)',
            done => {
                const rFields = {
                    c: true,
                    generation: 3,
                };
                assert.deepStrictEqual(cNode._dfGetFields(), startFields);
                cNode.setParent(null);
                assert.deepStrictEqual(cNode._dfGetFields(), rFields);
                done();
            });
        it('Nodes are updated accordingly when a parent is unset '
            + '(remove upper hierarchy)',
            done => {
                const rFields = {
                    gp1: true,
                    p1: true,
                    c: true,
                    generation: 3,
                };
                assert.deepStrictEqual(cNode._dfGetFields(), startFields);
                gpNode1.setParent(null);
                assert.deepStrictEqual(cNode._dfGetFields(), rFields);
                done();
            });
    });
});

View File

@ -256,85 +256,3 @@ describe('Werelogs Module-level Logger can log as specified by the log level', (
it('Fatal level does not filter fatal level out', filterGenerator('fatal', 'fatal'));
});
/* eslint-enable no-multi-spaces, max-len */
// Tests the module-level Logger's default-fields behavior end to end:
// log entries are captured through a DummyLogger injected via
// Config.simpleLogger (a module-wide global — these tests are therefore
// order-sensitive with anything else touching Config).
describe('Werelogs Module-level Logger provide the DefaultFields logic', () => {
    const loggerConfig = {
        level: 'info',
        dump: 'fatal',
        end: 'info',
    };
    it('should not modify the object passed as a parameter', done => {
        const defaultFields = {
            name: 'TestDefaultFields1',
            data: 0,
        };
        const add1 = {
            attr1: 0,
        };
        const add2 = {
            attr2: 'string',
        };
        const dummyLogger = new DummyLogger();
        Config.simpleLogger = dummyLogger;
        const logger = new Logger(defaultFields, loggerConfig);
        logger.addDefaultFields(add1);
        logger.addDefaultFields(add2);
        // The caller's objects must be left untouched by addDefaultFields
        assert.deepStrictEqual(add1, { attr1: 0 });
        assert.deepStrictEqual(add2, { attr2: 'string' });
        done();
    });
    it('should add one added default field to the log entries', done => {
        const defaultFields = {
            name: 'TestDefaultFields2',
            data: 0,
        };
        const clientInfo = {
            clientIP: '127.0.0.1',
        };
        const dummyLogger = new DummyLogger();
        Config.simpleLogger = dummyLogger;
        const logger = new Logger(defaultFields, loggerConfig);
        logger.addDefaultFields(clientInfo);
        logger.info('test message');
        // dummyLogger.ops[0][1][0] is the fields object of the first entry
        assert.strictEqual(dummyLogger.ops[0][1][0].clientIP,
            clientInfo.clientIP);
        done();
    });
    it('should add multiple added default fields to the log entries',
        done => {
            const defaultFields = {
                name: 'TestDefaultFields3',
                data: 0,
            };
            const clientInfo = {
                clientIP: '127.0.0.1',
                clientPort: '1337',
            };
            const requestInfo = {
                object: '/tata/self.txt',
                creator: 'Joddy',
            };
            const dummyLogger = new DummyLogger();
            Config.simpleLogger = dummyLogger;
            const logger = new Logger(defaultFields, loggerConfig);
            logger.addDefaultFields(clientInfo);
            logger.addDefaultFields(requestInfo);
            logger.info('test message');
            // Both added sets and the constructor fields must all appear
            assert.strictEqual(dummyLogger.ops[0][1][0].clientIP,
                clientInfo.clientIP);
            assert.strictEqual(dummyLogger.ops[0][1][0].clientPort,
                clientInfo.clientPort);
            assert.strictEqual(dummyLogger.ops[0][1][0].object,
                requestInfo.object);
            assert.strictEqual(dummyLogger.ops[0][1][0].creator,
                requestInfo.creator);
            assert.strictEqual(dummyLogger.ops[0][1][0].name,
                defaultFields.name);
            assert.strictEqual(dummyLogger.ops[0][1][0].data,
                defaultFields.data);
            done();
        });
});