Compare commits
9 Commits
developmen ... ft/tests_s
Author | SHA1 | Date
---|---|---
Antonin Coulibaly | c75435ec8c |
Antonin Coulibaly | 8ade0f71e9 |
Antonin Coulibaly | 3268fa1cc7 |
David Pineau | 6987e9031a |
David Pineau | 42a8dcc968 |
David Pineau | 812d2bec67 |
David Pineau | f0391eb5d1 |
David Pineau | 93291d55e4 |
David Pineau | bb63fc629c |
@@ -0,0 +1,247 @@
'use strict'; // eslint-disable-line strict

const Utils = require('./Utils.js');
const objectCopy = Utils.objectCopy;

/**
 * This class manages the internal `default fields` for logging classes, with a
 * notion of parent/child relationships and hierarchy.
 * A Child element will inherit the fields from the Parent element, and
 * complete them (or even override part of them) with its own fields. For
 * instance, a child can redefine (and thus override in the final result) a
 * field that is already defined by its parent.
 *
 * This class shall be used embedded within loggers, to provide the `Default
 * fields` and hierarchy logics.
 *
 * @private @property {DefaultFields} parent - The parent node in the
 *                                             hierarchy
 * @private @property {DefaultFields[]} children - The list of children nodes
 *                                                 in the hierarchy
 * @private @property {Object} fields - The dictionary of fields
 *                                      defined for this node
 * @private @property {Object} parentFields - The dictionary of fields
 *                                            inherited by this node from
 *                                            its parent node
 * @private @property {Object} precomputedFields - The prepared dictionary of
 *                                                 aggregated fields containing
 *                                                 the parent's fields
 *                                                 overridden with the node's
 *                                                 own fields. This is used as
 *                                                 a trick to reduce the
 *                                                 computational need.
 *
 * @mixin
 */
class DefaultFields {
    constructor() {
        this.parent = null;
        this.children = [];
        this.fields = {};
        this.parentFields = {};
        this.precomputedFields = {};
    }

    /**
     * This function allows manually destroying a node, so that all references
     * to parents or children are released. This helps in avoiding the creation
     * of memory leaks due to our internal cyclic references that cannot be
     * automatically released due to the mechanics of GC in Node.JS, and its
     * shitty prevention of meaningful OOP designs.
     *
     * All hail << The Coming of the Great White Handkerchief >> that will
     * wipe this misery out of our lives!
     *
     * @returns {undefined}
     */
    _dfDestroy() {
        this.children.forEach(child => {
            child.setParent(null);
        });
        this.setParent(null);
        this.children = [];
    }

    /**
     * Add a node to the list of children of the current node, effectively
     * making it one of the node's children.
     *
     * @param {DefaultFields} child - The node to add as a child of the current
     *                                node
     *
     * @return {undefined}
     */
    _dfRegisterChild(child) {
        this.children.push(child);
        this._dfNotifyChild(child);
    }

    /**
     * Remove a node from the list of children of the current node, effectively
     * cutting off the relationship between the two.
     *
     * @param {DefaultFields} toRemove - The node to remove from the list of
     *                                   children of the current node.
     *
     * @return {undefined}
     */
    _dfUnregisterChild(toRemove) {
        this.children = this.children.filter(child => child !== toRemove);
    }

    /**
     * Utility function to notify one child node of an update of the node's
     * precomputed fields.
     *
     * @param {DefaultFields} child - The child node to notify
     *
     * @return {undefined}
     */
    _dfNotifyChild(child) {
        child._dfSetParentFields(this.precomputedFields);
    }

    /**
     * Utility function to notify every child node of an update of the
     * node's precomputed fields.
     *
     * @return {undefined}
     */
    _dfNotifyChildren() {
        this.children.forEach(child => this._dfNotifyChild(child));
    }

    /**
     * This function allows changing the Parent node of the current node,
     * consequently changing the resulting aggregation of the hierarchy of
     * fields. This can be used for a temporary switch of parent node.
     *
     * @param {DefaultFields} parent - The new parent node to set for the
     *                                 current node
     *
     * @return {DefaultFields|null} The previous parent
     */
    setParent(parent) {
        const oldParent = this.parent;
        if (parent === oldParent) {
            return oldParent;
        }
        if (oldParent) {
            oldParent._dfUnregisterChild(this);
        }
        this.parent = parent || null;
        if (this.parent) {
            this.parent._dfRegisterChild(this);
        } else {
            this._dfSetParentFields();
        }
        return oldParent;
    }

    /**
     * Internal function to partially recompute the precomputedFields through
     * inclusion of the node's own fields into the precomputed ones. This
     * inclusion may override some already defined fields.
     *
     * This function shall always be the last one called when updating the
     * internal fields, as it also triggers the update of the children nodes.
     *
     * @return {undefined}
     */
    _dfAugmentPrecomputedFields() {
        objectCopy(this.precomputedFields, this.fields);
        this._dfNotifyChildren();
    }

    /**
     * Internal function to update the fields provided by the parent node in
     * the DefaultFields hierarchy. It serves as a notification hook to refresh
     * the precomputed fields depending on the parent node's fields.
     * Two situations may lead to calling this function:
     * 1. The parent node updated its precomputed fields and notified its
     *    children, including the current node
     * 2. The node reset its own fields, and we must re-compute to remove
     *    obsolete fields previously provided by the current node.
     *
     * @param {Object} parentFields - the precomputedFields from the parent node
     *
     * @return {undefined}
     */
    _dfSetParentFields(parentFields) {
        this.parentFields = parentFields || {};
        this.precomputedFields = objectCopy({}, this.parentFields);
        this._dfAugmentPrecomputedFields();
    }

    /**
     * This function allows resetting the fields managed by the DefaultFields
     * instance. It automatically triggers the re-computation of the
     * precomputed fields, cascading through the node and its children.
     *
     * /!\ This function may consume a significant amount of computational
     * resources if over-used.
     *
     * @return {Object} The previous set of default fields
     */
    resetDefaultFields() {
        const oldFields = this.fields;
        this.fields = {};
        // Automatically triggers the recomputation of precomputedFields
        this._dfSetParentFields(this.parentFields);
        return oldFields;
    }

    /**
     * This function allows the user to remove one or more items from the
     * default fields dictionary.
     *
     * @param {String[]} fields - List of the names of the fields to be removed
     *                            from the internal dictionary of default
     *                            fields
     *
     * @return {Object} The previous set of default fields
     */
    removeDefaultFields(fields) {
        const toRemove = {};
        fields.forEach(key => {
            toRemove[key] = undefined;
        });
        return this.addDefaultFields(toRemove);
    }

    /**
     * This function allows the user to add default fields to include into all
     * JSON log entries generated through this logger. As this function
     * attempts not to modify the provided fields object, it copies the fields
     * into a new object for safe keeping.
     *
     * @param {Object} fields The dictionary of additional fields to include
     *                        by default for this instance.
     *
     * @return {Object} The previous set of default fields (can be
     *                  safely ignored).
     */
    addDefaultFields(fields) {
        const oldFields = this.fields;
        this.fields = objectCopy({}, this.fields, fields);
        this._dfAugmentPrecomputedFields();
        return oldFields;
    }

    /**
     * This function returns the node's precomputed fields, which include all
     * of its hierarchy's fields and its own. This is intended to retrieve the
     * final form of the fields managed by the object.
     *
     * @return {Object} The precomputed fields to be added to a log entry
     */
    _dfGetFields() {
        return this.precomputedFields;
    }
}

module.exports = DefaultFields;
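A minimal usage sketch of the `DefaultFields` hierarchy introduced above (field names and values are illustrative; `Utils.objectCopy`, which is not part of this diff, is assumed to copy own enumerable properties in the manner of `Object.assign`):

```js
const DefaultFields = require('./lib/DefaultFields.js');

// Build a two-level hierarchy: the child inherits the parent's fields.
const parent = new DefaultFields();
const child = new DefaultFields();
child.setParent(parent);

parent.addDefaultFields({ module: 'api', region: 'eu' });
child.addDefaultFields({ region: 'us', requestId: '42' });

// Own fields override inherited ones in the precomputed aggregate:
// => { module: 'api', region: 'us', requestId: '42' }
console.log(child._dfGetFields());

// Parent updates propagate to live children through _dfNotifyChildren():
parent.addDefaultFields({ host: 'node-1' });
console.log(child._dfGetFields().host); // => 'node-1'

// Break the cyclic parent/child references once done with the node.
child._dfDestroy();
```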
@@ -2,17 +2,26 @@
 const LogLevel = require('./LogLevel.js');
 const RequestLogger = require('./RequestLogger.js');
-const unserializeUids = require('./Utils.js').unserializeUids;
+const Utils = require('./Utils.js');
 const Config = require('./Config.js');
+const DefaultFields = require('./DefaultFields.js');
 
-class Logger {
+const unserializeUids = Utils.unserializeUids;
+const objectCopy = Utils.objectCopy;
+
+class Logger extends DefaultFields {
 
     /**
      * This is the constructor of the Logger class. It takes optional
      * configuration parameters that allow modifying its behavior.
      *
-     * @param {string} name - The name of the Logger. It can be found later on
-     *                        in the log entries.
+     * @param {string|Object} fields - Deprecated: {string}: The name of the
+     *                                 Logger that will be included in the log
+     *                                 entries
+     *                                 Advised: {Object} A set of fields that
+     *                                 will be used as the default fields for
+     *                                 this Logger, and will be included in
+     *                                 all later log entries.
      *
      * @param {object} config - A configuration object for werelogs.
      * @param {string} config.level - The string name of the logging level
@@ -23,21 +32,24 @@ class Logger {
      *                                ('trace', 'debug', 'info', 'warn',
      *                                'error' and 'fatal' in order of
      *                                importance.)
      * @param {object[]} config.streams - The array of streams into which to
      *                                    log. Their configuration is directly
      *                                    related to the expected bunyan
      *                                    streams array, for compatibility
      *                                    purposes (except that the 'level'
      *                                    field is not accounted for)
      *
      * @see [Bunyan's documentation]{@link
      * https://github.com/trentm/node-bunyan/blob/master/README.md#streams} for
      * a more detailed description of the streams array configuration.
      *
      * @returns {undefined}
      */
-    constructor(name, config) {
-        this.name = name;
+    constructor(fields, config) {
+        super();
+        /* TODO XXX FIXME Remove starting at version 8.0 FIXME XXX TODO
+         * vvvvvvvvvvvvvvvvvvvvvvvv */
+        if (typeof fields === 'string') {
+            this.addDefaultFields({ name: fields });
+        } else if (typeof fields === 'object') {
+        /* ^^^^^^^^^^^^^^^^^^^^^^^
+         * TODO XXX FIXME Remove starting at version 8.0 FIXME XXX TODO */
+            this.addDefaultFields(fields);
+        /* TODO XXX FIXME Remove starting at version 8.0 FIXME XXX TODO
+         * vvvvvvvvvvvvvvvvvvvvvvvv */
+        }
+        /* ^^^^^^^^^^^^^^^^^^^^^^^
+         * TODO XXX FIXME Remove starting at version 8.0 FIXME XXX TODO */
         Config.update(config);
     }
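With the compatibility shim above, both constructor forms coexist until version 8.0. A sketch of the two call styles (the module entry point and the stream object shape are assumptions based on the bunyan-compatible `config.streams` documentation; they are not shown in this diff):

```js
const Logger = require('werelogs').Logger;

// Advised form: a dictionary of default fields; 'name' replaces the old
// dedicated `name` argument and is inherited by request loggers.
const log = new Logger({ name: 'MyService', component: 'api' }, {
    level: 'info',
    dump: 'error',
    streams: [{ stream: process.stdout }], // bunyan-style; 'level' is ignored
});

// Deprecated form, kept until version 8.0: a plain name string, internally
// turned into addDefaultFields({ name: 'MyService' }).
const legacyLog = new Logger('MyService', { level: 'info', dump: 'error' });
```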
@@ -61,7 +73,7 @@ class Logger {
         const rLog = new RequestLogger(Config.logger,
                                        Config.level, Config.dump, Config.end,
                                        uids);
-        rLog.addDefaultFields({ name: this.name });
+        rLog.setParent(this);
         return rLog;
     }
@@ -77,13 +89,12 @@ class Logger {
         const rLog = new RequestLogger(Config.logger,
                                        Config.level, Config.dump, Config.end,
                                        unserializeUids(serializedUids));
-        rLog.addDefaultFields({ name: this.name });
+        rLog.setParent(this);
         return rLog;
     }
 
     _doLog(levelName, msg, data) {
         const sLogger = Config.logger;
-        const finalData = { name: this.name, time: Date.now() };
         if (!LogLevel.shouldLog(levelName, Config.level)) {
             return;
         }
@@ -96,6 +107,9 @@ class Logger {
                         + ' This development error should be fixed ASAP.');
             return;
         }
+        const finalData = objectCopy({},
+                                     this._dfGetFields(),
+                                     { time: Date.now() });
         if (data) {
             Object.keys(data).forEach(k => {
                 finalData[k] = data[k];
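The net effect of the `_doLog` change is a three-layer merge, with later layers winning. A worked model of the precedence (using `Object.assign` as a stand-in for `objectCopy`, whose implementation is not shown here):

```js
// Stand-ins for this sketch only:
const defaults = { name: 'MyService', component: 'api' }; // this._dfGetFields()
const data = { component: 'auth', reqId: 7 };             // per-call fields

// 1. precomputed default fields (own + inherited), 2. the entry timestamp,
// 3. per-call data overriding both.
const finalData = Object.assign({}, defaults, { time: Date.now() }, data);
// => { name: 'MyService', component: 'auth', time: <now>, reqId: 7 }
```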
@@ -4,6 +4,7 @@ const assert = require('assert');
 
 const LogLevel = require('./LogLevel.js');
 const Utils = require('./Utils.js');
+const DefaultFields = require('./DefaultFields.js');
 const serializeUids = Utils.serializeUids;
 const generateUid = Utils.generateUid;
 const objectCopy = Utils.objectCopy;
@@ -16,39 +17,38 @@ function ensureUidValidity(uid) {
     return uid;
 }
 
-class EndLogger {
+/*
+ * @mixes DefaultFields
+ */
+class EndLogger extends DefaultFields {
     constructor(reqLogger) {
+        super();
         this.logger = reqLogger;
-        this.fields = {};
+        this.setParent(reqLogger);
     }
 
     augmentedLog(level, msg, data) {
         assert.strictEqual(this.logger.elapsedTime, null, 'The logger\'s'
                            + 'end() wrapper should not be called more than'
                            + ' once.');
         // We can alter current instance, as it won't be usable after this
         // call.
-        this.fields = objectCopy(this.fields, data || {});
-        return this.logger.log(level, msg, this.fields, true);
-    }
-
-    /**
-     * This function allows the user to add default fields to include into all
-     * JSON log entries generated through this request logger. As this function
-     * attempt not to modify the provided fields object, it copies the field
-     * into a new object for safe keeping.
-     *
-     * @param {object} fields The dictionnary of additional fields to include
-     *                        by default for this instance of the
-     *                        RequestLogger.
-     *
-     * @returns {object} The previous set of default fields (can be
-     *                   safely ignored).
-     */
-    addDefaultFields(fields) {
-        const oldFields = this.fields;
-        this.fields = objectCopy({}, this.fields, fields);
-        return oldFields;
+        const fields = objectCopy(this.precomputedFields, data || {});
+        const res = this.logger.log(level, msg, fields, true);
+        /* XXX NOTE
+         *
+         * This is our "manual" memory management.
+         * Since child and parent `DefaultFields` are actually referring to
+         * each other, the GC cannot release the child logger.
+         * Also, we have no means to automatically unregister the child from
+         * its parent, nor to have the parent notify the child of its update
+         * without keeping those references. So here we go, manual memory
+         * management in Garbage-collected languages!
+         *
+         * All hail << The Coming of the Great White Handkerchief >> that will
+         * wipe this misery out of our lives!
+         *
+         * XXX NOTE */
+        this._dfDestroy();
+        return res;
     }
 
     /**
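Usage-wise, the rewritten `augmentedLog` makes the end() wrapper strictly single-shot and self-cleaning: it merges the precomputed hierarchy fields with the final data, emits the entry, then severs its `DefaultFields` links. A sketch of the intended call pattern (the `end()` accessor is inferred from the assertion message above; it is not itself shown in this diff):

```js
const log = new Logger({ name: 'MyService' }, { level: 'info', dump: 'error' });
const reqLog = log.newRequestLogger();

reqLog.info('processing request', { path: '/tata/self.txt' });

// The end() wrapper may be used exactly once: it logs the final entry with
// all precomputed fields, then calls _dfDestroy() so that the parent/child
// reference cycle no longer pins the request logger in memory.
reqLog.end().info('request done', { httpCode: 200 });
```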
@@ -142,7 +142,7 @@ class EndLogger {
  * the global log level; and is used to track the log events for one given
  * request.
  */
-class RequestLogger {
+class RequestLogger extends DefaultFields {
 
     /**
      * Constructor of the WereLogs Request Logger.
@@ -179,6 +179,7 @@ class RequestLogger {
      * @returns {undefined}
      */
     constructor(logger, logLevel, dumpThreshold, endLevel, uids) {
+        super();
         let uidList = undefined;
 
         if (!LogLevel.shouldLog(dumpThreshold, logLevel)) {
@@ -195,7 +196,6 @@ class RequestLogger {
         this.uids = uidList || [generateUid()];
 
         this.entries = [];
-        this.fields = {};
         this.logLevel = logLevel;
         this.dumpThreshold = dumpThreshold;
         this.endLevel = endLevel;
@@ -235,25 +235,6 @@ class RequestLogger {
         return serializeUids(this.uids);
     }
 
-    /**
-     * This function allows the user to add default fields to include into all
-     * JSON log entries generated through this request logger. As this function
-     * attempt not to modify the provided fields object, it copies the field
-     * into a new object for safe keeping.
-     *
-     * @param {object} fields The dictionnary of additional fields to include
-     *                        by default for this instance of the
-     *                        RequestLogger.
-     *
-     * @returns {object} The previous set of default fields (can be
-     *                   safely ignored).
-     */
-    addDefaultFields(fields) {
-        const oldFields = this.fields;
-        this.fields = objectCopy({}, this.fields, fields);
-        return oldFields;
-    }
-
     /**
      * Logging function to write a trace-level log entry.
     *
@@ -435,7 +416,7 @@ class RequestLogger {
             });
             return;
         }
-        const fields = objectCopy({}, this.fields, logFields || {});
+        const fields = objectCopy({}, this._dfGetFields(), logFields || {});
         const endFlag = isEnd || false;
 
         /*
@@ -469,6 +450,10 @@ class RequestLogger {
         } else if (LogLevel.shouldLog(level, this.logLevel)) {
             this.doLogIO(logEntry);
         }
+
+        if (isEnd) {
+            this.setParent(null);
+        }
     }
 
     /**
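Taken together, the `RequestLogger` changes replace per-instance field copies with graph membership: `newRequestLogger()` attaches the request logger under the module logger via `setParent(this)`, so module-level defaults flow into every request entry and keep following later updates, and the end-of-request entry detaches the child via `setParent(null)`. A condensed model (field values illustrative):

```js
const moduleLog = new Logger({ name: 'MyService', submodule: 'api' }, {
    level: 'info',
    dump: 'error',
});

// setParent(this) inside newRequestLogger() links the request logger under
// the module logger; its entries carry name and submodule automatically.
const reqLog = moduleLog.newRequestLogger();
reqLog.addDefaultFields({ reqId: 'abc123' });
reqLog.info('handling request'); // => name, submodule, reqId

// Updates on the module logger propagate to live request loggers.
moduleLog.addDefaultFields({ datacenter: 'dc1' });
reqLog.info('still handling'); // => now also carries datacenter
```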
@@ -15,8 +15,9 @@ pass.on('data', data => {
     logBuffer.records.push(data.toString());
 });
 
-function createModuleLogger() {
-    return new Logger('FT-test', {
+function createModuleLogger(fields) {
+    const defaultFields = fields || 'FT-test';
+    return new Logger(defaultFields, {
         level: 'info',
         dump: 'error',
         streams: [{
@@ -105,5 +106,35 @@ describe('Werelogs is usable as a dependency', () => {
             checkFields(fields);
             done();
         });
+
+        it('Should not log a removed field', done => {
+            const logger = createModuleLogger().newRequestLogger();
+            const msg = 'This is a message with no fields(removed)';
+            const fields = { errorCode: 0, description: 'TestNotFailing' };
+            logger.addDefaultFields(fields);
+            logger.removeDefaultFields(['errorCode', 'description']);
+            logger.info(msg);
+            assert.strictEqual(parseLogEntry().message, msg);
+            assert(!parseLogEntry().hasOwnProperty('errorCode'));
+            assert(!parseLogEntry().hasOwnProperty('description'));
+            done();
+        });
+
+        it('Should include the parent Loggers default fields', done => {
+            const mFields = {
+                name: 'TestModule',
+                submodule: 'functional',
+            };
+            const logger = createModuleLogger(mFields);
+            const rLog = logger.newRequestLogger();
+            const msg =
+                "This is a message including the module's default fields";
+            rLog.info(msg);
+            assert.strictEqual(parseLogEntry().message, msg);
+            assert.deepStrictEqual(parseLogEntry().name, mFields.name);
+            assert.deepStrictEqual(parseLogEntry().submodule,
+                                   mFields.submodule);
+            done();
+        });
     });
 });
@@ -0,0 +1,515 @@
'use strict'; // eslint-disable-line strict

const assert = require('assert');

const DefaultFields = require('../../lib/DefaultFields.js');

describe('class DefaultFields', () => {
    describe('Basic Fields logic', () => {
        it('Can get the resulting fields', done => {
            const df = new DefaultFields();
            const fields = df._dfGetFields();
            assert(fields !== null && typeof fields === 'object');
            done();
        });

        it('No fields are set by default', done => {
            const df = new DefaultFields();
            assert.deepStrictEqual(df._dfGetFields(), {});
            done();
        });

        it('Validate defensive coding prevents undefined parentFields',
            done => {
                const df = new DefaultFields();
                df._dfSetParentFields();
                assert.deepStrictEqual(df._dfGetFields(), {});
                done();
            });

        describe('With fields', () => {
            let df = null;
            const fields = {
                name: 'Testing',
                testing: true,
                count: 4,
                data: 'doggy',
            };

            beforeEach(done => {
                df = new DefaultFields();
                df.addDefaultFields(fields);
                done();
            });

            it('Can add a dictionary of new fields', done => {
                assert.deepStrictEqual(df._dfGetFields(), fields);
                done();
            });

            it('Can remove a list of invalid fields', done => {
                df.removeDefaultFields(['invalid', 'notthere']);
                done();
            });

            it('Can remove a list of fields', done => {
                df.removeDefaultFields(['data', 'count']);
                assert.strictEqual(df._dfGetFields().data, undefined);
                assert.strictEqual(df._dfGetFields().count, undefined);
                assert.strictEqual(df._dfGetFields().name, 'Testing');
                assert.strictEqual(df._dfGetFields().testing, true);
                done();
            });

            it('Can reset the fields', done => {
                df.resetDefaultFields();
                assert.deepStrictEqual(df._dfGetFields(), {});
                done();
            });
        });
    });

    describe('Basic Parent-Child logic', () => {
        let parentNode = null;

        beforeEach(done => {
            parentNode = new DefaultFields();
            done();
        });

        function linkedChild(pNode) {
            const childNode = new DefaultFields();
            childNode.setParent(pNode);
            return childNode;
        }

        it('Can set a parent to a node', done => {
            const childNode = linkedChild(parentNode);
            assert.strictEqual(childNode.parent, parentNode);
            assert.notStrictEqual(parentNode.children.indexOf(childNode), -1);
            done();
        });

        it('Can remove a child from a node', done => {
            const childNode = linkedChild(parentNode);
            parentNode._dfUnregisterChild(childNode);
            assert.strictEqual(parentNode.children.indexOf(childNode), -1);
            done();
        });

        it('Can reset the parent of a node (w/ undefined)', done => {
            const childNode = linkedChild(parentNode);
            childNode.setParent(undefined);
            assert.strictEqual(childNode.parent, null);
            assert.strictEqual(parentNode.children.indexOf(childNode), -1);
            done();
        });

        it('Can reset the parent of a node (w/ null)', done => {
            const childNode = linkedChild(parentNode);
            childNode.setParent(null);
            assert.strictEqual(childNode.parent, null);
            assert.strictEqual(parentNode.children.indexOf(childNode), -1);
            done();
        });
    });

    describe('Single parent-child relationship', () => {
        let parentNode = null;
        let childNode = null;

        beforeEach(done => {
            parentNode = new DefaultFields();
            childNode = new DefaultFields();
            childNode.setParent(parentNode);
            done();
        });

        it('Child can define its own fields', done => {
            const fields = {
                child: true,
                parent: false,
                test: 1,
            };
            childNode.addDefaultFields(fields);
            assert.deepStrictEqual(childNode._dfGetFields(), fields);
            done();
        });

        it('Parent can define its own fields', done => {
            const fields = {
                child: false,
                parent: true,
                test: 2,
            };
            parentNode.addDefaultFields(fields);
            assert.deepStrictEqual(parentNode._dfGetFields(), fields);
            done();
        });

        it('Child inherits parents fields', done => {
            const fields = {
                child: true,
                parent: false,
                test: 3,
            };
            parentNode.addDefaultFields(fields);
            assert.deepStrictEqual(childNode._dfGetFields(), fields);
            done();
        });

        it('Child inherits successive parent field updates', done => {
            const pFields1 = {
                parent: true,
                test: 4,
            };
            const rFields1 = {
                parent: true,
                test: 4,
            };
            const pFields2 = { child: false };
            const rFields2 = {
                parent: true,
                test: 4,
                child: false,
            };
            const pFields3 = {
                data: 'pouet',
            };
            const rFields3 = {
                parent: true,
                test: 4,
                child: false,
                data: 'pouet',
            };
            parentNode.addDefaultFields(pFields1);
            assert.deepStrictEqual(childNode._dfGetFields(), rFields1);
            parentNode.addDefaultFields(pFields2);
            assert.deepStrictEqual(childNode._dfGetFields(), rFields2);
            parentNode.addDefaultFields(pFields3);
            assert.deepStrictEqual(childNode._dfGetFields(), rFields3);
            done();
        });

        it('Child inherits reset parent fields', done => {
            const pFields = {
                parent: true,
                test: 5,
            };
            parentNode.addDefaultFields(pFields);
            assert.deepStrictEqual(childNode._dfGetFields(), pFields);
            parentNode.resetDefaultFields();
            assert.deepStrictEqual(childNode._dfGetFields(), {});
            done();
        });

        it('Child mixes parent and own fields', done => {
            const pFields = { parent: true };
            const cFields = {
                child: true,
                test: 6,
            };
            const rFields = {
                parent: true,
                child: true,
                test: 6,
            };
            parentNode.addDefaultFields(pFields);
            childNode.addDefaultFields(cFields);
            assert.deepStrictEqual(childNode._dfGetFields(), rFields);
            done();
        });

        it('Child overrides conflicting parent fields', done => {
            const pFields = {
                parent: true,
                child: false,
                test: 0,
            };
            const cFields = {
                child: true,
                test: 7,
            };
            const rFields = {
                parent: true,
                child: true,
                test: 7,
            };
            parentNode.addDefaultFields(pFields);
            childNode.addDefaultFields(cFields);
            assert.deepStrictEqual(childNode._dfGetFields(), rFields);
            done();
        });
    });

    describe('Multiple-level parent-child relationship', () => {
        let ggpNode = null;
        let gpNode = null;
        let pNode = null;
        let childNode = null;

        beforeEach(done => {
            ggpNode = new DefaultFields();
            gpNode = new DefaultFields();
            gpNode.setParent(ggpNode);
            pNode = new DefaultFields();
            pNode.setParent(gpNode);
            childNode = new DefaultFields();
            childNode.setParent(pNode);
            done();
        });

        it('Child inherits from whole hierarchy (newer -> older)',
            done => {
                const ggpFields = { great: true };
                const gpFields = { grand: true };
                const pFields = { parent: true };
                const rFields = {
                    great: true,
                    grand: true,
                    parent: true,
                };
                pNode.addDefaultFields(pFields);
                gpNode.addDefaultFields(gpFields);
                ggpNode.addDefaultFields(ggpFields);
                assert.deepStrictEqual(childNode._dfGetFields(), rFields);
                done();
            });

        it('Child inherits from whole hierarchy (older -> newer)',
            done => {
                const ggpFields = { great: true };
                const gpFields = { grand: true };
                const pFields = { parent: true };
                const rFields = {
                    great: true,
                    grand: true,
                    parent: true,
                };
                ggpNode.addDefaultFields(ggpFields);
                gpNode.addDefaultFields(gpFields);
                pNode.addDefaultFields(pFields);
                assert.deepStrictEqual(childNode._dfGetFields(), rFields);
                done();
            });

        it('Nodes inherit in-hierarchy fields reset', done => {
            const ggpFields = { great: true };
            const gpFields = { grand: true };
            const pFields = { parent: true };
            const rFields = {
                great: true,
                // grand: true, // Part 'reset'
                parent: true,
            };
            ggpNode.addDefaultFields(ggpFields);
            gpNode.addDefaultFields(gpFields);
            pNode.addDefaultFields(pFields);
            gpNode.resetDefaultFields();
            assert.deepStrictEqual(childNode._dfGetFields(), rFields);
            done();
        });

        it('Field overriding is cascading through generations (newer -> older)',
            done => {
                const ggpFields = { generation: 0 };
                const gpFields = { generation: 1 };
                const pFields = { generation: 2 };
                const cFields = { generation: 3 };
                childNode.addDefaultFields(cFields);
                pNode.addDefaultFields(pFields);
                gpNode.addDefaultFields(gpFields);
                ggpNode.addDefaultFields(ggpFields);
                assert.deepStrictEqual(childNode._dfGetFields(), cFields);
                assert.deepStrictEqual(pNode._dfGetFields(), pFields);
                assert.deepStrictEqual(gpNode._dfGetFields(), gpFields);
                assert.deepStrictEqual(ggpNode._dfGetFields(), ggpFields);
                done();
            });

        it('Field overriding is cascading through generations (older -> newer)',
            done => {
                const ggpFields = { generation: 0 };
                const gpFields = { generation: 1 };
                const pFields = { generation: 2 };
                const cFields = { generation: 3 };
                ggpNode.addDefaultFields(ggpFields);
                gpNode.addDefaultFields(gpFields);
                pNode.addDefaultFields(pFields);
                childNode.addDefaultFields(cFields);
                assert.deepStrictEqual(childNode._dfGetFields(), cFields);
                assert.deepStrictEqual(pNode._dfGetFields(), pFields);
                assert.deepStrictEqual(gpNode._dfGetFields(), gpFields);
                assert.deepStrictEqual(ggpNode._dfGetFields(), ggpFields);
                done();
            });

        it('Destroying intermediate level breaks relationships', done => {
            const ggpFields = { ggp: 1 };
            const gpFields = { gp: 1 };
            const pFields = { p: 1 };
            const cFields = { c: 1 };
            ggpNode.addDefaultFields(ggpFields);
            gpNode.addDefaultFields(gpFields);
            pNode.addDefaultFields(pFields);
            childNode.addDefaultFields(cFields);
            pNode._dfDestroy();
            assert.strictEqual(gpNode.children.indexOf(pNode), -1);
            assert.strictEqual(pNode.parent, null);
            assert.strictEqual(pNode.children.indexOf(childNode), -1);
            assert.strictEqual(childNode.parent, null);
            assert.deepStrictEqual(pNode._dfGetFields(), pFields);
            assert.deepStrictEqual(childNode._dfGetFields(), cFields);
            done();
        });

        it('Destroying intermediate level(2) breaks relationships', done => {
            const ggpFields = { ggp: 1 };
            const gpFields = { gp: 1 };
            const pFields = { p: 1 };
            const cFields = { c: 1 };
            const rCFields = { p: 1, c: 1 };
            ggpNode.addDefaultFields(ggpFields);
            gpNode.addDefaultFields(gpFields);
            pNode.addDefaultFields(pFields);
            childNode.addDefaultFields(cFields);
            gpNode._dfDestroy();
            assert.strictEqual(ggpNode.children.indexOf(gpNode), -1);
            assert.strictEqual(gpNode.parent, null);
            assert.strictEqual(gpNode.children.indexOf(pNode), -1);
            assert.strictEqual(pNode.parent, null);
            assert.deepStrictEqual(gpNode._dfGetFields(), gpFields);
            assert.deepStrictEqual(childNode._dfGetFields(), rCFields);
            done();
        });
    });

    describe('Topology changes', () => {
        let ggpNode1 = null;
        let ggpNode2 = null;
        let gpNode1 = null;
        let gpNode2 = null;
        let pNode1 = null;
        let pNode2 = null;
        let cNode = null;

        const ggp1Fields = { ggp1: true, generation: 0 };
        const ggp2Fields = { ggp2: true, generation: 0 };
        const gp1Fields = { gp1: true, generation: 1 };
        const gp2Fields = { gp2: true, generation: 1 };
        const p1Fields = { p1: true, generation: 2 };
        const p2Fields = { p2: true, generation: 2 };
        const cFields = { c: true, generation: 3 };

        const startFields = {
            ggp1: true,
            gp1: true,
            p1: true,
            c: true,
            generation: 3,
        };

        beforeEach(done => {
            ggpNode1 = new DefaultFields();
            gpNode1 = new DefaultFields();
            gpNode1.setParent(ggpNode1);
            pNode1 = new DefaultFields();
            pNode1.setParent(gpNode1);

            ggpNode2 = new DefaultFields();
            gpNode2 = new DefaultFields();
            gpNode2.setParent(ggpNode2);
            pNode2 = new DefaultFields();
            pNode2.setParent(gpNode2);

            cNode = new DefaultFields();
            cNode.setParent(pNode1);

            ggpNode1.addDefaultFields(ggp1Fields);
            ggpNode2.addDefaultFields(ggp2Fields);
            gpNode1.addDefaultFields(gp1Fields);
            gpNode2.addDefaultFields(gp2Fields);
            pNode1.addDefaultFields(p1Fields);
            pNode2.addDefaultFields(p2Fields);
            cNode.addDefaultFields(cFields);

            done();
        });

        it('Nodes are updated accordingly when a parent changes '
           + '(change whole upper hierarchy)',
            done => {
                const rFields = {
                    ggp2: true,
                    gp2: true,
                    p2: true,
                    c: true,
                    generation: 3,
                };
                assert.deepStrictEqual(cNode._dfGetFields(), startFields);
                cNode.setParent(pNode2);
                assert.deepStrictEqual(cNode._dfGetFields(), rFields);
                done();
            });

        it('Nodes are updated accordingly when a parent changes '
           + '(change part of the upper hierarchy)',
            done => {
                const rFields = {
                    ggp2: true,
                    gp2: true,
                    p1: true,
                    c: true,
                    generation: 3,
                };
                assert.deepStrictEqual(cNode._dfGetFields(), startFields);
                pNode1.setParent(gpNode2);
                assert.deepStrictEqual(cNode._dfGetFields(), rFields);
                done();
            });

        it('Nodes are updated accordingly when a parent changes '
           + '(shortcut hierarchy)',
            done => {
                const rFields = {
                    ggp2: true,
                    c: true,
                    generation: 3,
                };
                assert.deepStrictEqual(cNode._dfGetFields(), startFields);
                cNode.setParent(ggpNode2);
                assert.deepStrictEqual(cNode._dfGetFields(), rFields);
                done();
            });

        it('Nodes are updated accordingly when a parent is unset '
           + '(remove whole hierarchy)',
            done => {
                const rFields = {
                    c: true,
                    generation: 3,
                };
                assert.deepStrictEqual(cNode._dfGetFields(), startFields);
                cNode.setParent(null);
                assert.deepStrictEqual(cNode._dfGetFields(), rFields);
                done();
            });

        it('Nodes are updated accordingly when a parent is unset '
           + '(remove upper hierarchy)',
            done => {
                const rFields = {
                    gp1: true,
                    p1: true,
                    c: true,
                    generation: 3,
                };
                assert.deepStrictEqual(cNode._dfGetFields(), startFields);
                gpNode1.setParent(null);
                assert.deepStrictEqual(cNode._dfGetFields(), rFields);
                done();
            });
    });
});
@@ -256,3 +256,85 @@ describe('Werelogs Module-level Logger can log as specified by the log level', (
     it('Fatal level does not filter fatal level out', filterGenerator('fatal', 'fatal'));
 });
 /* eslint-enable no-multi-spaces, max-len */
+
+describe('Werelogs Module-level Logger provide the DefaultFields logic', () => {
+    const loggerConfig = {
+        level: 'info',
+        dump: 'fatal',
+        end: 'info',
+    };
+
+    it('should not modify the object passed as a parameter', done => {
+        const defaultFields = {
+            name: 'TestDefaultFields1',
+            data: 0,
+        };
+        const add1 = {
+            attr1: 0,
+        };
+        const add2 = {
+            attr2: 'string',
+        };
+        const dummyLogger = new DummyLogger();
+        Config.simpleLogger = dummyLogger;
+        const logger = new Logger(defaultFields, loggerConfig);
+        logger.addDefaultFields(add1);
+        logger.addDefaultFields(add2);
+        assert.deepStrictEqual(add1, { attr1: 0 });
+        assert.deepStrictEqual(add2, { attr2: 'string' });
+        done();
+    });
+
+    it('should add one added default field to the log entries', done => {
+        const defaultFields = {
+            name: 'TestDefaultFields2',
+            data: 0,
+        };
+        const clientInfo = {
+            clientIP: '127.0.0.1',
+        };
+        const dummyLogger = new DummyLogger();
+        Config.simpleLogger = dummyLogger;
+        const logger = new Logger(defaultFields, loggerConfig);
+        logger.addDefaultFields(clientInfo);
+        logger.info('test message');
+        assert.strictEqual(dummyLogger.ops[0][1][0].clientIP,
+                           clientInfo.clientIP);
+        done();
+    });
+
+    it('should add multiple added default fields to the log entries',
+        done => {
+            const defaultFields = {
+                name: 'TestDefaultFields3',
+                data: 0,
+            };
+            const clientInfo = {
+                clientIP: '127.0.0.1',
+                clientPort: '1337',
+            };
+            const requestInfo = {
+                object: '/tata/self.txt',
+                creator: 'Joddy',
+            };
+            const dummyLogger = new DummyLogger();
+            Config.simpleLogger = dummyLogger;
+            const logger = new Logger(defaultFields, loggerConfig);
+            logger.addDefaultFields(clientInfo);
+            logger.addDefaultFields(requestInfo);
+            logger.info('test message');
+            assert.strictEqual(dummyLogger.ops[0][1][0].clientIP,
+                               clientInfo.clientIP);
+            assert.strictEqual(dummyLogger.ops[0][1][0].clientPort,
+                               clientInfo.clientPort);
+            assert.strictEqual(dummyLogger.ops[0][1][0].object,
+                               requestInfo.object);
+            assert.strictEqual(dummyLogger.ops[0][1][0].creator,
+                               requestInfo.creator);
+            assert.strictEqual(dummyLogger.ops[0][1][0].name,
+                               defaultFields.name);
+            assert.strictEqual(dummyLogger.ops[0][1][0].data,
+                               defaultFields.data);
+            done();
+        });
+});