Compare commits

..

10 Commits

Author SHA1 Message Date
Jordi Bertran de Balanda ac852727ba fix ft_test 2022-01-13 17:30:42 +01:00
Jordi Bertran de Balanda 151fde6a35 fix lint after eslint upgrade 2022-01-13 15:14:13 +01:00
Jordi Bertran de Balanda 2dc3ac6bb6 upgrade eslint for jest plugin 2022-01-13 15:14:13 +01:00
Jordi Bertran de Balanda 476da8ed62 more migrations 2022-01-12 18:35:33 +01:00
Jordi Bertran de Balanda 27e2e2393c migrate Extension 2022-01-12 18:35:33 +01:00
Jordi Bertran de Balanda ab06f6f7fb more migration 2022-01-12 18:35:33 +01:00
Jordi Bertran de Balanda 62d66e89ac migrate LRUCache 2022-01-12 18:35:33 +01:00
Jordi Bertran de Balanda 1fc0efeb78 deprecate leveldb support 2022-01-12 18:35:33 +01:00
Jordi Bertran de Balanda 8c2e95e31b initial batch of migrations 2022-01-12 18:35:33 +01:00
Jordi Bertran de Balanda e8bd68619d make index imports coherent 2022-01-12 18:35:33 +01:00
196 changed files with 4582 additions and 5011 deletions

View File

@ -1 +0,0 @@
{ "extends": "scality" }

7
.eslintrc.json Normal file
View File

@ -0,0 +1,7 @@
{
"extends": ["scality"],
"plugins": ["jest"],
"env": {
"jest/globals": true
}
}

View File

@ -3,6 +3,6 @@ module.exports = {
['@babel/preset-env', { targets: { node: 'current' } }], ['@babel/preset-env', { targets: { node: 'current' } }],
'@babel/preset-typescript', '@babel/preset-typescript',
], ],
plugins: ['add-module-exports'] plugins: ['add-module-exports'],
}; };

View File

@ -1,16 +1,15 @@
module.exports = { export default {
auth: require('./lib/auth/auth'), auth: require('./lib/auth/auth'),
constants: require('./lib/constants'), constants: require('./lib/constants'),
db: require('./lib/db'), errors: require('./lib/errors'),
errors: require('./lib/errors.js'),
errorUtils: require('./lib/errorUtils'), errorUtils: require('./lib/errorUtils'),
shuffle: require('./lib/shuffle'), shuffle: require('./lib/shuffle'),
stringHash: require('./lib/stringHash'), stringHash: require('./lib/stringHash'),
ipCheck: require('./lib/ipCheck'), ipCheck: require('./lib/ipCheck'),
jsutil: require('./lib/jsutil'), jsutil: require('./lib/jsutil'),
https: { https: {
ciphers: require('./lib/https/ciphers.js'), ciphers: require('./lib/https/ciphers'),
dhparam: require('./lib/https/dh2048.js'), dhparam: require('./lib/https/dh2048'),
}, },
algorithms: { algorithms: {
list: require('./lib/algos/list/exportAlgos'), list: require('./lib/algos/list/exportAlgos'),
@ -25,23 +24,23 @@ module.exports = {
}, },
}, },
policies: { policies: {
evaluators: require('./lib/policyEvaluator/evaluator.js'), evaluators: require('./lib/policyEvaluator/evaluator'),
validateUserPolicy: require('./lib/policy/policyValidator') validateUserPolicy: require('./lib/policy/policyValidator')
.validateUserPolicy, .validateUserPolicy,
evaluatePrincipal: require('./lib/policyEvaluator/principal'), evaluatePrincipal: require('./lib/policyEvaluator/principal'),
RequestContext: require('./lib/policyEvaluator/RequestContext.js'), RequestContext: require('./lib/policyEvaluator/RequestContext'),
requestUtils: require('./lib/policyEvaluator/requestUtils'), requestUtils: require('./lib/policyEvaluator/requestUtils'),
actionMaps: require('./lib/policyEvaluator/utils/actionMaps'), actionMaps: require('./lib/policyEvaluator/utils/actionMaps'),
}, },
Clustering: require('./lib/Clustering'), Clustering: require('./lib/Clustering'),
testing: { testing: {
matrix: require('./lib/testing/matrix.js'), matrix: require('./lib/testing/matrix'),
}, },
versioning: { versioning: {
VersioningConstants: require('./lib/versioning/constants.js') VersioningConstants: require('./lib/versioning/constants')
.VersioningConstants, .VersioningConstants,
Version: require('./lib/versioning/Version.js').Version, Version: require('./lib/versioning/Version').Version,
VersionID: require('./lib/versioning/VersionID.js'), VersionID: require('./lib/versioning/VersionID'),
}, },
network: { network: {
http: { http: {
@ -57,8 +56,8 @@ module.exports = {
probe: { probe: {
ProbeServer: require('./lib/network/probe/ProbeServer'), ProbeServer: require('./lib/network/probe/ProbeServer'),
HealthProbeServer: HealthProbeServer:
require('./lib/network/probe/HealthProbeServer.js'), require('./lib/network/probe/HealthProbeServer'),
Utils: require('./lib/network/probe/Utils.js'), Utils: require('./lib/network/probe/Utils'),
}, },
kmip: require('./lib/network/kmip'), kmip: require('./lib/network/kmip'),
kmipClient: require('./lib/network/kmip/Client'), kmipClient: require('./lib/network/kmip/Client'),

View File

@ -1,6 +1,7 @@
'use strict'; // eslint-disable-line 'use strict'; // eslint-disable-line
const cluster = require('cluster'); import * as cluster from 'cluster';
class Clustering { class Clustering {
/** /**
@ -12,20 +13,31 @@ class Clustering {
* releasing ressources * releasing ressources
* @return {Clustering} itself * @return {Clustering} itself
*/ */
constructor(size, logger, shutdownTimeout) { size: number;
this._size = size; shutdownTimeout: number;
logger: any; // TODO logger ???
shutdown: boolean;
workers: cluster.Worker[];
workersTimeout: NodeJS.Timeout[]; // TODO array of worker timeouts
workersStatus: number[];
status: number;
exitCb?: Function;
index?: number;
constructor(size: number, logger: any, shutdownTimeout=5000) {
if (size < 1) { if (size < 1) {
throw new Error('Cluster size must be greater than or equal to 1'); throw new Error('Cluster size must be greater than or equal to 1');
} }
this._shutdownTimeout = shutdownTimeout || 5000; this.size = size;
this._logger = logger; this.shutdownTimeout = shutdownTimeout || 5000;
this._shutdown = false; this.logger = logger;
this._workers = new Array(size).fill(undefined); this.shutdown = false;
this._workersTimeout = new Array(size).fill(undefined); this.workers = new Array(size).fill(undefined);
this._workersStatus = new Array(size).fill(undefined); this.workersTimeout = new Array(size).fill(undefined);
this._status = 0; this.workersStatus = new Array(size).fill(undefined);
this._exitCb = undefined; // Exit callback this.status = 0;
this._index = undefined; this.exitCb = undefined; // Exit callback
this.index = undefined;
} }
/** /**
@ -34,23 +46,23 @@ class Clustering {
* @private * @private
* @return {undefined} * @return {undefined}
*/ */
_afterStop() { _afterStop(): undefined {
// Asuming all workers shutdown gracefully // Asuming all workers shutdown gracefully
this._status = 0; this.status = 0;
const size = this._size; const size = this.size;
for (let i = 0; i < size; ++i) { for (let i = 0; i < size; ++i) {
// If the process return an error code or killed by a signal, // If the process return an error code or killed by a signal,
// set the status // set the status
if (typeof this._workersStatus[i] === 'number') { if (typeof this.workersStatus[i] === 'number') {
this._status = this._workersStatus[i]; this.status = this.workersStatus[i];
break; break;
} else if (typeof this._workersStatus[i] === 'string') { } else if (typeof this.workersStatus[i] === 'string') {
this._status = 1; this.status = 1;
break; break;
} }
} }
if (this._exitCb) { if (this.exitCb) {
return this._exitCb(this); return this.exitCb(this);
} }
return process.exit(this.getStatus()); return process.exit(this.getStatus());
} }
@ -64,45 +76,47 @@ class Clustering {
* @param {string} signal - Exit signal * @param {string} signal - Exit signal
* @return {undefined} * @return {undefined}
*/ */
_workerExited(worker, i, code, signal) { _workerExited(
worker: cluster.Worker, index: number, code: number, signal: number
): undefined {
// If the worker: // If the worker:
// - was killed by a signal // - was killed by a signal
// - return an error code // - return an error code
// - or just stopped // - or just stopped
if (signal) { if (signal) {
this._logger.info('Worker killed by signal', { this.logger.info('Worker killed by signal', {
signal, signal,
id: i, id: index,
childPid: worker.process.pid, childPid: worker.process.pid,
}); });
this._workersStatus[i] = signal; this.workersStatus[index] = signal;
} else if (code !== 0) { } else if (code !== 0) {
this._logger.error('Worker exit with code', { this.logger.error('Worker exit with code', {
code, code,
id: i, id: index,
childPid: worker.process.pid, childPid: worker.process.pid,
}); });
this._workersStatus[i] = code; this.workersStatus[index] = code;
} else { } else {
this._logger.info('Worker shutdown gracefully', { this.logger.info('Worker shutdown gracefully', {
id: i, id: index,
childPid: worker.process.pid, childPid: worker.process.pid,
}); });
this._workersStatus[i] = undefined; this.workersStatus[index] = undefined;
} }
this._workers[i] = undefined; this.workers[index] = undefined;
if (this._workersTimeout[i]) { if (this.workersTimeout[index]) {
clearTimeout(this._workersTimeout[i]); clearTimeout(this.workersTimeout[index]);
this._workersTimeout[i] = undefined; this.workersTimeout[index] = undefined;
} }
// If we don't trigger the stop method, the watchdog // If we don't trigger the stop method, the watchdog
// will autorestart the worker // will autorestart the worker
if (this._shutdown === false) { if (this.shutdown === false) {
return process.nextTick(() => this.startWorker(i)); return process.nextTick(() => this.startWorker(index));
} }
// Check if an worker is still running // Check if an worker is still running
if (!this._workers.every(cur => cur === undefined)) { if (!this.workers.every(cur => cur === undefined)) {
return undefined; return;
} }
return this._afterStop(); return this._afterStop();
} }
@ -113,26 +127,26 @@ class Clustering {
* @param {number} i Index of the starting worker * @param {number} i Index of the starting worker
* @return {undefined} * @return {undefined}
*/ */
startWorker(i) { startWorker(index: number): undefined {
if (!cluster.isMaster) { if (!cluster.isMaster) {
return; return;
} }
// Fork a new worker // Fork a new worker
this._workers[i] = cluster.fork(); this.workers[index] = cluster.fork();
// Listen for message from the worker // Listen for message from the worker
this._workers[i].on('message', msg => { this.workers[index].on('message', msg => {
// If the worker is ready, send him his id // If the worker is ready, send him his id
if (msg === 'ready') { if (msg === 'ready') {
this._workers[i].send({ msg: 'setup', id: i }); this.workers[index].send({ msg: 'setup', id: index });
} }
}); });
this._workers[i].on('exit', (code, signal) => this.workers[index].on('exit', (code, signal) =>
this._workerExited(this._workers[i], i, code, signal)); this._workerExited(this.workers[index], index, code, signal));
// Trigger when the worker was started // Trigger when the worker was started
this._workers[i].on('online', () => { this.workers[index].on('online', () => {
this._logger.info('Worker started', { this.logger.info('Worker started', {
id: i, id: index,
childPid: this._workers[i].process.pid, childPid: this.workers[index].process.pid,
}); });
}); });
} }
@ -143,8 +157,8 @@ class Clustering {
* @param {function} cb - Callback(Clustering, [exitSignal]) * @param {function} cb - Callback(Clustering, [exitSignal])
* @return {Clustering} Itself * @return {Clustering} Itself
*/ */
onExit(cb) { onExit(cb: Function): Clustering {
this._exitCb = cb; this.exitCb = cb;
return this; return this;
} }
@ -155,21 +169,21 @@ class Clustering {
* @param {function} cb - Callback to run the worker * @param {function} cb - Callback to run the worker
* @return {Clustering} itself * @return {Clustering} itself
*/ */
start(cb) { start(cb: Function): Clustering {
process.on('SIGINT', () => this.stop('SIGINT')); process.on('SIGINT', () => this.stop('SIGINT'));
process.on('SIGHUP', () => this.stop('SIGHUP')); process.on('SIGHUP', () => this.stop('SIGHUP'));
process.on('SIGQUIT', () => this.stop('SIGQUIT')); process.on('SIGQUIT', () => this.stop('SIGQUIT'));
process.on('SIGTERM', () => this.stop('SIGTERM')); process.on('SIGTERM', () => this.stop('SIGTERM'));
process.on('SIGPIPE', () => {}); process.on('SIGPIPE', () => {});
process.on('exit', (code, signal) => { process.on('exit', (code, signal) => {
if (this._exitCb) { if (this.exitCb) {
this._status = code || 0; this.status = code || 0;
return this._exitCb(this, signal); return this.exitCb(this, signal);
} }
return process.exit(code || 0); return process.exit(code || 0);
}); });
process.on('uncaughtException', err => { process.on('uncaughtException', err => {
this._logger.fatal('caught error', { this.logger.fatal('caught error', {
error: err.message, error: err.message,
stack: err.stack.split('\n').map(str => str.trim()), stack: err.stack.split('\n').map(str => str.trim()),
}); });
@ -180,7 +194,7 @@ class Clustering {
// know the id of the slave cluster // know the id of the slave cluster
process.on('message', msg => { process.on('message', msg => {
if (msg.msg === 'setup') { if (msg.msg === 'setup') {
this._index = msg.id; this.index = msg.id;
cb(this); cb(this);
} }
}); });
@ -188,7 +202,7 @@ class Clustering {
// the worker has started // the worker has started
process.send('ready'); process.send('ready');
} else { } else {
for (let i = 0; i < this._size; ++i) { for (let i = 0; i < this.size; ++i) {
this.startWorker(i); this.startWorker(i);
} }
} }
@ -200,8 +214,8 @@ class Clustering {
* *
* @return {Cluster.Worker[]} Workers * @return {Cluster.Worker[]} Workers
*/ */
getWorkers() { getWorkers(): cluster.Worker[] {
return this._workers; return this.workers;
} }
/** /**
@ -209,8 +223,8 @@ class Clustering {
* *
* @return {number} Status code * @return {number} Status code
*/ */
getStatus() { getStatus(): number {
return this._status; return this.status;
} }
/** /**
@ -218,8 +232,8 @@ class Clustering {
* *
* @return {boolean} - True if master, false otherwise * @return {boolean} - True if master, false otherwise
*/ */
isMaster() { isMaster(): boolean {
return this._index === undefined; return this.index === undefined;
} }
/** /**
@ -227,8 +241,8 @@ class Clustering {
* *
* @return {number|undefined} Worker index, undefined if it's master * @return {number|undefined} Worker index, undefined if it's master
*/ */
getIndex() { getIndex(): number {
return this._index; return this.index;
} }
/** /**
@ -237,22 +251,22 @@ class Clustering {
* @param {string} signal - Set internally when processes killed by signal * @param {string} signal - Set internally when processes killed by signal
* @return {undefined} * @return {undefined}
*/ */
stop(signal) { stop(signal: string): undefined {
if (!cluster.isMaster) { if (!cluster.isMaster) {
if (this._exitCb) { if (this.exitCb) {
return this._exitCb(this, signal); return this.exitCb(this, signal);
} }
return process.exit(0); return process.exit(0);
} }
this._shutdown = true; this.shutdown = true;
return this._workers.forEach((worker, i) => { return this.workers.forEach((worker, index) => {
if (!worker) { if (!worker) {
return undefined; return undefined;
} }
this._workersTimeout[i] = setTimeout(() => { this.workersTimeout[index] = setTimeout(() => {
// Kill the worker if the sigterm was ignored or take too long // Kill the worker if the sigterm was ignored or take too long
process.kill(worker.process.pid, 'SIGKILL'); process.kill(worker.process.pid, 'SIGKILL');
}, this._shutdownTimeout); }, this.shutdownTimeout);
// Send sigterm to the process, allowing to release ressources // Send sigterm to the process, allowing to release ressources
// and save some states // and save some states
return process.kill(worker.process.pid, 'SIGTERM'); return process.kill(worker.process.pid, 'SIGTERM');
@ -260,4 +274,4 @@ class Clustering {
} }
} }
module.exports = Clustering; export default Clustering;

View File

@ -1,4 +1,4 @@
const assert = require('assert'); import { strict as assert } from 'assert';
/** /**
* @class * @class
@ -12,9 +12,15 @@ class LRUCache {
* @param {number} maxEntries - maximum number of entries kept in * @param {number} maxEntries - maximum number of entries kept in
* the cache * the cache
*/ */
constructor(maxEntries) { maxEntries: number;
private entryCount: number;
private entryMap: object;
private lruHead: any; // TODO lruTrail?
private lruTail: any; // TODO lruTrail?
constructor(maxEntries: number) {
assert(maxEntries >= 1); assert(maxEntries >= 1);
this._maxEntries = maxEntries; this.maxEntries = maxEntries;
this.clear(); this.clear();
} }
@ -27,8 +33,8 @@ class LRUCache {
* @return {boolean} true if the cache contained an entry with * @return {boolean} true if the cache contained an entry with
* this key, false if it did not * this key, false if it did not
*/ */
add(key, value) { add(key: string, value: object): boolean {
let entry = this._entryMap[key]; let entry = this.entryMap[key];
if (entry) { if (entry) {
entry.value = value; entry.value = value;
// make the entry the most recently used by re-pushing it // make the entry the most recently used by re-pushing it
@ -37,15 +43,15 @@ class LRUCache {
this._lruPushEntry(entry); this._lruPushEntry(entry);
return true; return true;
} }
if (this._entryCount === this._maxEntries) { if (this.entryCount === this.maxEntries) {
// if the cache is already full, abide by the LRU strategy // if the cache is already full, abide by the LRU strategy
// and remove the least recently used entry from the cache // and remove the least recently used entry from the cache
// before pushing the new entry // before pushing the new entry
this._removeEntry(this._lruTail); this._removeEntry(this.lruTail);
} }
entry = { key, value }; entry = { key, value };
this._entryMap[key] = entry; this.entryMap[key] = entry;
this._entryCount += 1; this.entryCount += 1;
this._lruPushEntry(entry); this._lruPushEntry(entry);
return false; return false;
} }
@ -59,8 +65,8 @@ class LRUCache {
* exists in the cache, or undefined if not found - either if the * exists in the cache, or undefined if not found - either if the
* key was never added or if it has been evicted from the cache. * key was never added or if it has been evicted from the cache.
*/ */
get(key) { get(key: string): object | undefined{
const entry = this._entryMap[key]; const entry = this.entryMap[key];
if (entry) { if (entry) {
// make the entry the most recently used by re-pushing it // make the entry the most recently used by re-pushing it
// to the head of the LRU list // to the head of the LRU list
@ -79,8 +85,8 @@ class LRUCache {
* there was no entry with this key in the cache - either if the * there was no entry with this key in the cache - either if the
* key was never added or if it has been evicted from the cache. * key was never added or if it has been evicted from the cache.
*/ */
remove(key) { remove(key: string): boolean {
const entry = this._entryMap[key]; const entry = this.entryMap[key];
if (entry) { if (entry) {
this._removeEntry(entry); this._removeEntry(entry);
return true; return true;
@ -93,8 +99,8 @@ class LRUCache {
* *
* @return {number} current number of cached entries * @return {number} current number of cached entries
*/ */
count() { count(): number {
return this._entryCount; return this.entryCount;
} }
/** /**
@ -102,11 +108,11 @@ class LRUCache {
* *
* @return {undefined} * @return {undefined}
*/ */
clear() { clear(): undefined {
this._entryMap = {}; this.entryMap = {};
this._entryCount = 0; this.entryCount = 0;
this._lruHead = null; this.lruHead = null;
this._lruTail = null; this.lruTail = null;
} }
/** /**
@ -116,16 +122,16 @@ class LRUCache {
* @param {object} entry - entry to push * @param {object} entry - entry to push
* @return {undefined} * @return {undefined}
*/ */
_lruPushEntry(entry) { _lruPushEntry(entry: object): undefined {
/* eslint-disable no-param-reassign */ /* eslint-disable no-param-reassign */
entry._lruNext = this._lruHead; entry._lruNext = this.lruHead;
entry._lruPrev = null; entry._lruPrev = null;
if (this._lruHead) { if (this.lruHead) {
this._lruHead._lruPrev = entry; this.lruHead._lruPrev = entry;
} }
this._lruHead = entry; this.lruHead = entry;
if (!this._lruTail) { if (!this.lruTail) {
this._lruTail = entry; this.lruTail = entry;
} }
/* eslint-enable no-param-reassign */ /* eslint-enable no-param-reassign */
} }
@ -136,17 +142,17 @@ class LRUCache {
* @param {object} entry - entry to remove * @param {object} entry - entry to remove
* @return {undefined} * @return {undefined}
*/ */
_lruRemoveEntry(entry) { _lruRemoveEntry(entry): undefined {
/* eslint-disable no-param-reassign */ /* eslint-disable no-param-reassign */
if (entry._lruPrev) { if (entry._lruPrev) {
entry._lruPrev._lruNext = entry._lruNext; entry._lruPrev._lruNext = entry._lruNext;
} else { } else {
this._lruHead = entry._lruNext; this.lruHead = entry._lruNext;
} }
if (entry._lruNext) { if (entry._lruNext) {
entry._lruNext._lruPrev = entry._lruPrev; entry._lruNext._lruPrev = entry._lruPrev;
} else { } else {
this._lruTail = entry._lruPrev; this.lruTail = entry._lruPrev;
} }
/* eslint-enable no-param-reassign */ /* eslint-enable no-param-reassign */
} }
@ -157,11 +163,11 @@ class LRUCache {
* @param {object} entry - cache entry to remove * @param {object} entry - cache entry to remove
* @return {undefined} * @return {undefined}
*/ */
_removeEntry(entry) { _removeEntry(entry: object): undefined {
this._lruRemoveEntry(entry); this._lruRemoveEntry(entry);
delete this._entryMap[entry.key]; delete this.entryMap[entry.key];
this._entryCount -= 1; this.entryCount -= 1;
} }
} }
module.exports = LRUCache; export default LRUCache;

View File

@ -1,6 +1,6 @@
'use strict'; // eslint-disable-line strict 'use strict'; // eslint-disable-line strict
const { FILTER_SKIP, SKIP_NONE } = require('./tools'); import { FILTER_SKIP, SKIP_NONE } from './tools';
// Use a heuristic to amortize the cost of JSON // Use a heuristic to amortize the cost of JSON
// serialization/deserialization only on largest metadata where the // serialization/deserialization only on largest metadata where the
@ -22,7 +22,7 @@ const TRIM_METADATA_MIN_BLOB_SIZE = 10000;
/** /**
* Base class of listing extensions. * Base class of listing extensions.
*/ */
class Extension { export class Extension {
/** /**
* This takes a list of parameters and a logger as the inputs. * This takes a list of parameters and a logger as the inputs.
* Derivatives should have their own format regarding parameters. * Derivatives should have their own format regarding parameters.
@ -31,7 +31,13 @@ class Extension {
* @param {RequestLogger} logger - the logger * @param {RequestLogger} logger - the logger
* @constructor * @constructor
*/ */
constructor(parameters, logger) {
parameters: any;
logger: any;
res: any;
keys: number;
constructor(parameters: any, logger: any) {
// inputs // inputs
this.parameters = parameters; this.parameters = parameters;
this.logger = logger; this.logger = logger;
@ -51,7 +57,7 @@ class Extension {
* heavy unused fields, or left untouched (depending on size * heavy unused fields, or left untouched (depending on size
* heuristics) * heuristics)
*/ */
trimMetadata(value) { trimMetadata(value: string): string {
let ret = undefined; let ret = undefined;
if (value.length >= TRIM_METADATA_MIN_BLOB_SIZE) { if (value.length >= TRIM_METADATA_MIN_BLOB_SIZE) {
try { try {
@ -81,7 +87,7 @@ class Extension {
* *
* @return {object} - listing parameters for metadata * @return {object} - listing parameters for metadata
*/ */
genMDParams() { genMDParams(): object {
return {}; return {};
} }
@ -96,7 +102,7 @@ class Extension {
* = 0: entry is accepted but not included (skipping) * = 0: entry is accepted but not included (skipping)
* < 0: entry is not accepted, listing should finish * < 0: entry is not accepted, listing should finish
*/ */
filter(entry) { filter(entry: any): number {
return entry ? FILTER_SKIP : FILTER_SKIP; return entry ? FILTER_SKIP : FILTER_SKIP;
} }
@ -108,7 +114,7 @@ class Extension {
* @return {string} - the insight: a common prefix or a master key, * @return {string} - the insight: a common prefix or a master key,
* or SKIP_NONE if there is no insight * or SKIP_NONE if there is no insight
*/ */
skipping() { skipping(): string {
return SKIP_NONE; return SKIP_NONE;
} }
@ -116,9 +122,7 @@ class Extension {
* Get the listing resutls. Format depends on derivatives' specific logic. * Get the listing resutls. Format depends on derivatives' specific logic.
* @return {Array} - The listed elements * @return {Array} - The listed elements
*/ */
result() { result(): any {
return this.res; return this.res;
} }
} }
module.exports.default = Extension;

View File

@ -1,16 +1,37 @@
'use strict'; // eslint-disable-line strict 'use strict'; // eslint-disable-line strict
const { inc, checkLimit, listingParamsMasterKeysV0ToV1, import { inc, checkLimit, listingParamsMasterKeysV0ToV1,
FILTER_END, FILTER_ACCEPT } = require('./tools'); FILTER_END, FILTER_ACCEPT } from './tools';
import { VersioningConstants as VSConst} from '../../versioning/constants';
const DEFAULT_MAX_KEYS = 1000; const DEFAULT_MAX_KEYS = 1000;
const VSConst = require('../../versioning/constants').VersioningConstants;
const { DbPrefixes, BucketVersioningKeyFormat } = VSConst; const { DbPrefixes, BucketVersioningKeyFormat } = VSConst;
function numberDefault(num, defaultNum) { function numberDefault(num: string, defaultNum: number): number {
const parsedNum = Number.parseInt(num, 10); const parsedNum = Number.parseInt(num, 10);
return Number.isNaN(parsedNum) ? defaultNum : parsedNum; return Number.isNaN(parsedNum) ? defaultNum : parsedNum;
} }
interface MPUParams {
delimiter: any;
splitter: any;
prefix: any; // TODO type
uploadIdMarker: any; // TODO type
maxKeys: string;
queryPrefixLength: string;
keyMarker?: any; // TODO type
}
interface V0Params {
gt?: string;
gte?: string;
lt?: string;
lte?: string;
}
/** /**
* Class for the MultipartUploads extension * Class for the MultipartUploads extension
*/ */
@ -23,7 +44,22 @@ class MultipartUploads {
* @param {String} [vFormat] - versioning key format * @param {String} [vFormat] - versioning key format
* @return {undefined} * @return {undefined}
*/ */
constructor(params, logger, vFormat) { params: MPUParams; // TODO param type
vFormat: string; // TODO vFormat type
CommonPrefixes: any[]; // TODO commonPrefixes type
Uploads: any[]; // TODO type
IsTruncated: boolean;
NextKeyMarker: string;
NextUploadIdMarker: string;
prefixLength: number;
queryPrefixLength: number;
keys: number;
maxKeys: number;
delimiter: any; // TODO type
splitter: any; // TODO type
logger: any // TODO type
constructor(params: MPUParams, logger: any, vFormat: string) {
this.params = params; this.params = params;
this.vFormat = vFormat || BucketVersioningKeyFormat.v0; this.vFormat = vFormat || BucketVersioningKeyFormat.v0;
this.CommonPrefixes = []; this.CommonPrefixes = [];
@ -51,8 +87,8 @@ class MultipartUploads {
}[this.vFormat]); }[this.vFormat]);
} }
genMDParamsV0() { genMDParamsV0(): V0Params {
const params = {}; const params: V0Params = {};
if (this.params.keyMarker) { if (this.params.keyMarker) {
params.gt = `overview${this.params.splitter}` + params.gt = `overview${this.params.splitter}` +
`${this.params.keyMarker}${this.params.splitter}`; `${this.params.keyMarker}${this.params.splitter}`;
@ -74,6 +110,7 @@ class MultipartUploads {
} }
genMDParamsV1() { genMDParamsV1() {
// TODO v1 params definition
const v0params = this.genMDParamsV0(); const v0params = this.genMDParamsV0();
return listingParamsMasterKeysV0ToV1(v0params); return listingParamsMasterKeysV0ToV1(v0params);
} }
@ -85,7 +122,7 @@ class MultipartUploads {
* @param {String} value - The value of the key * @param {String} value - The value of the key
* @return {undefined} * @return {undefined}
*/ */
addUpload(value) { addUpload(value: string): undefined {
const tmp = JSON.parse(value); const tmp = JSON.parse(value);
this.Uploads.push({ this.Uploads.push({
key: tmp.key, key: tmp.key,
@ -114,7 +151,7 @@ class MultipartUploads {
* @param {String} commonPrefix - The commonPrefix to add * @param {String} commonPrefix - The commonPrefix to add
* @return {undefined} * @return {undefined}
*/ */
addCommonPrefix(commonPrefix) { addCommonPrefix(commonPrefix: string): undefined {
if (this.CommonPrefixes.indexOf(commonPrefix) === -1) { if (this.CommonPrefixes.indexOf(commonPrefix) === -1) {
this.CommonPrefixes.push(commonPrefix); this.CommonPrefixes.push(commonPrefix);
this.NextKeyMarker = commonPrefix; this.NextKeyMarker = commonPrefix;
@ -122,11 +159,11 @@ class MultipartUploads {
} }
} }
getObjectKeyV0(obj) { getObjectKeyV0(obj: any) { // TODO this is an Upload value
return obj.key; return obj.key;
} }
getObjectKeyV1(obj) { getObjectKeyV1(obj: any) { // TODO this is an Upload value
return obj.key.slice(DbPrefixes.Master.length); return obj.key.slice(DbPrefixes.Master.length);
} }
@ -135,14 +172,14 @@ class MultipartUploads {
* @param {String} obj - The key and value of the element * @param {String} obj - The key and value of the element
* @return {number} - > 0: Continue, < 0: Stop * @return {number} - > 0: Continue, < 0: Stop
*/ */
filter(obj) { filter(obj: any): number {
// Check first in case of maxkeys = 0 // Check first in case of maxkeys = 0
if (this.keys >= this.maxKeys) { if (this.keys >= this.maxKeys) {
// In cases of maxKeys <= 0 => IsTruncated = false // In cases of maxKeys <= 0 => IsTruncated = false
this.IsTruncated = this.maxKeys > 0; this.IsTruncated = this.maxKeys > 0;
return FILTER_END; return FILTER_END;
} }
const key = this.getObjectKey(obj); const key = this.getObjectKey(obj); // TODO this is actually valid - see ctor
const value = obj.value; const value = obj.value;
if (this.delimiter) { if (this.delimiter) {
const mpuPrefixSlice = `overview${this.splitter}`.length; const mpuPrefixSlice = `overview${this.splitter}`.length;
@ -162,7 +199,7 @@ class MultipartUploads {
return FILTER_ACCEPT; return FILTER_ACCEPT;
} }
skipping() { skipping(): string {
return ''; return '';
} }
@ -170,7 +207,7 @@ class MultipartUploads {
* Returns the formatted result * Returns the formatted result
* @return {Object} - The result. * @return {Object} - The result.
*/ */
result() { result(): object {
return { return {
CommonPrefixes: this.CommonPrefixes, CommonPrefixes: this.CommonPrefixes,
Uploads: this.Uploads, Uploads: this.Uploads,
@ -183,6 +220,7 @@ class MultipartUploads {
} }
} }
module.exports = { export {
MultipartUploads, MultipartUploads,
}; MPUParams
}

View File

@ -1,14 +1,27 @@
'use strict'; // eslint-disable-line strict 'use strict'; // eslint-disable-line strict
const Extension = require('./Extension').default; import { Extension } from './Extension';
const { checkLimit, FILTER_END, FILTER_ACCEPT, FILTER_SKIP } = require('./tools'); import { checkLimit, FILTER_END, FILTER_ACCEPT, FILTER_SKIP } from './tools';
const DEFAULT_MAX_KEYS = 10000; const DEFAULT_MAX_KEYS = 10000;
interface ListParams {
maxKeys: number;
filterKey: any; // TODO type
filterKeyStartsWith: any; // TODO type
}
/** /**
* Class of an extension doing the simple listing * Class of an extension doing the simple listing
*/ */
class List extends Extension { class List extends Extension {
maxKeys: number;
filterKey: any;
filterKeyStartsWith: any;
/** /**
* Constructor * Constructor
* Set the logger and the res * Set the logger and the res
@ -16,7 +29,7 @@ class List extends Extension {
* @param {RequestLogger} logger - The logger of the request * @param {RequestLogger} logger - The logger of the request
* @return {undefined} * @return {undefined}
*/ */
constructor(parameters, logger) { constructor(parameters: ListParams, logger: any) {
super(parameters, logger); super(parameters, logger);
this.res = []; this.res = [];
if (parameters) { if (parameters) {
@ -29,7 +42,7 @@ class List extends Extension {
this.keys = 0; this.keys = 0;
} }
genMDParams() { genMDParams(): object {
const params = this.parameters ? { const params = this.parameters ? {
gt: this.parameters.gt, gt: this.parameters.gt,
gte: this.parameters.gte || this.parameters.start, gte: this.parameters.gte || this.parameters.start,
@ -53,7 +66,7 @@ class List extends Extension {
* *
* @return {Boolean} Returns true if matches, else false. * @return {Boolean} Returns true if matches, else false.
*/ */
customFilter(value) { customFilter(value: string): boolean {
let _value; let _value;
try { try {
_value = JSON.parse(value); _value = JSON.parse(value);
@ -90,7 +103,7 @@ class List extends Extension {
* @return {number} - > 0 : continue listing * @return {number} - > 0 : continue listing
* < 0 : listing done * < 0 : listing done
*/ */
filter(elem) { filter(elem: object): number {
// Check first in case of maxkeys <= 0 // Check first in case of maxkeys <= 0
if (this.keys >= this.maxKeys) { if (this.keys >= this.maxKeys) {
return FILTER_END; return FILTER_END;
@ -117,7 +130,7 @@ class List extends Extension {
* Function returning the result * Function returning the result
* @return {Array} - The listed elements * @return {Array} - The listed elements
*/ */
result() { result(): any[] {
return this.res; return this.res;
} }
} }

View File

@ -1,9 +1,9 @@
'use strict'; // eslint-disable-line strict 'use strict'; // eslint-disable-line strict
const Extension = require('./Extension').default; import { Extension } from './Extension';
const { inc, listingParamsMasterKeysV0ToV1, import { inc, listingParamsMasterKeysV0ToV1,
FILTER_END, FILTER_ACCEPT, FILTER_SKIP } = require('./tools'); FILTER_END, FILTER_ACCEPT, FILTER_SKIP } from './tools';
const VSConst = require('../../versioning/constants').VersioningConstants; import { VersioningConstants as VSConst } from '../../versioning/constants';
const { DbPrefixes, BucketVersioningKeyFormat } = VSConst; const { DbPrefixes, BucketVersioningKeyFormat } = VSConst;
/** /**
@ -14,10 +14,41 @@ const { DbPrefixes, BucketVersioningKeyFormat } = VSConst;
* @param {Number} delimiterIndex - 'folder' index in the path * @param {Number} delimiterIndex - 'folder' index in the path
* @return {String} - CommonPrefix * @return {String} - CommonPrefix
*/ */
function getCommonPrefix(key, delimiter, delimiterIndex) { function getCommonPrefix(
key: string, delimiter: string, delimiterIndex: number
): string {
return key.substring(0, delimiterIndex + delimiter.length); return key.substring(0, delimiterIndex + delimiter.length);
} }
interface DelimiterParams {
delimiter: string;
prefix: string;
marker: string;
maxKeys: number;
v2: boolean;
startAfter: string;
continuationToken: string;
alphabeticalOrder: boolean;
}
interface DelimiterContentItem {
key: string;
value: string;
}
interface DelimiterResult {
CommonPrefixes: string[];
Contents: DelimiterContentItem[]; // TODO type this.Contents,
IsTruncated: boolean;
Delimiter: string;
NextMarker?: any; // TODO type
NextContinuationToken?: any; // TODO type
}
/** /**
* Handle object listing with parameters * Handle object listing with parameters
* *
@ -55,7 +86,25 @@ class Delimiter extends Extension {
* request * request
* @param {String} [vFormat] - versioning key format * @param {String} [vFormat] - versioning key format
*/ */
constructor(parameters, logger, vFormat) {
delimiter: string;
prefix: string;
marker: string;
maxKeys: number;
startAfter: string;
continuationToken: string;
alphabeticalOrder: boolean;
vFormat: string;
CommonPrefixes: string[];
Contents: DelimiterContentItem[];
IsTruncated: boolean;
NextMarker: string;
NextContinuationToken: string;
startMarker: string;
continueMarker: string;
nextContinueMarker: string;
constructor(parameters: DelimiterParams, logger: any, vFormat: string) {
super(parameters, logger); super(parameters, logger);
// original listing parameters // original listing parameters
this.delimiter = parameters.delimiter; this.delimiter = parameters.delimiter;
@ -134,7 +183,7 @@ class Delimiter extends Extension {
* final state of the result if it is the case * final state of the result if it is the case
* @return {Boolean} - indicates if the iteration has to stop * @return {Boolean} - indicates if the iteration has to stop
*/ */
_reachedMaxKeys() { _reachedMaxKeys(): boolean {
if (this.keys >= this.maxKeys) { if (this.keys >= this.maxKeys) {
// In cases of maxKeys <= 0 -> IsTruncated = false // In cases of maxKeys <= 0 -> IsTruncated = false
this.IsTruncated = this.maxKeys > 0; this.IsTruncated = this.maxKeys > 0;
@ -151,7 +200,7 @@ class Delimiter extends Extension {
* @param {String} value - The value of the key * @param {String} value - The value of the key
* @return {number} - indicates if iteration should continue * @return {number} - indicates if iteration should continue
*/ */
addContents(key, value) { addContents(key: string, value: string): number {
if (this._reachedMaxKeys()) { if (this._reachedMaxKeys()) {
return FILTER_END; return FILTER_END;
} }
@ -180,7 +229,7 @@ class Delimiter extends Extension {
* @param {String} obj.value - The value of the element * @param {String} obj.value - The value of the element
* @return {number} - indicates if iteration should continue * @return {number} - indicates if iteration should continue
*/ */
filter(obj) { filter(obj): number {
const key = this.getObjectKey(obj); const key = this.getObjectKey(obj);
const value = obj.value; const value = obj.value;
if ((this.prefix && !key.startsWith(this.prefix)) if ((this.prefix && !key.startsWith(this.prefix))
@ -206,7 +255,7 @@ class Delimiter extends Extension {
* @param {Number} index - after prefix starting point * @param {Number} index - after prefix starting point
* @return {Boolean} - indicates if iteration should continue * @return {Boolean} - indicates if iteration should continue
*/ */
addCommonPrefix(key, index) { addCommonPrefix(key: string, index: number) {
const commonPrefix = getCommonPrefix(key, this.delimiter, index); const commonPrefix = getCommonPrefix(key, this.delimiter, index);
if (this.CommonPrefixes.indexOf(commonPrefix) === -1 if (this.CommonPrefixes.indexOf(commonPrefix) === -1
&& this[this.nextContinueMarker] !== commonPrefix) { && this[this.nextContinueMarker] !== commonPrefix) {
@ -228,7 +277,7 @@ class Delimiter extends Extension {
* @return {string} - the present range (NextMarker) if repd believes * @return {string} - the present range (NextMarker) if repd believes
* that it's enough and should move on * that it's enough and should move on
*/ */
skippingV0() { skippingV0(): string {
return this[this.nextContinueMarker]; return this[this.nextContinueMarker];
} }
@ -239,7 +288,7 @@ class Delimiter extends Extension {
* @return {string} - the present range (NextMarker) if repd believes * @return {string} - the present range (NextMarker) if repd believes
* that it's enough and should move on * that it's enough and should move on
*/ */
skippingV1() { skippingV1(): string {
return DbPrefixes.Master + this[this.nextContinueMarker]; return DbPrefixes.Master + this[this.nextContinueMarker];
} }
@ -249,12 +298,12 @@ class Delimiter extends Extension {
* isn't truncated * isn't truncated
* @return {Object} - following amazon format * @return {Object} - following amazon format
*/ */
result() { result(): DelimiterResult {
/* NextMarker is only provided when delimiter is used. /* NextMarker is only provided when delimiter is used.
* specified in v1 listing documentation * specified in v1 listing documentation
* http://docs.aws.amazon.com/AmazonS3/latest/API/RESTBucketGET.html * http://docs.aws.amazon.com/AmazonS3/latest/API/RESTBucketGET.html
*/ */
const result = { const result: DelimiterResult = {
CommonPrefixes: this.CommonPrefixes, CommonPrefixes: this.CommonPrefixes,
Contents: this.Contents, Contents: this.Contents,
IsTruncated: this.IsTruncated, IsTruncated: this.IsTruncated,
@ -271,4 +320,4 @@ class Delimiter extends Extension {
} }
} }
module.exports = { Delimiter }; export { Delimiter, DelimiterParams };

View File

@ -1,10 +1,11 @@
'use strict'; // eslint-disable-line strict 'use strict'; // eslint-disable-line strict
const Delimiter = require('./delimiter').Delimiter; import { Delimiter } from './delimiter';
const Version = require('../../versioning/Version').Version; import type { DelimiterParams } from './delimiter';
const VSConst = require('../../versioning/constants').VersioningConstants; import { Version } from '../../versioning/Version';
import { VersioningConstants as VSConst } from '../../versioning/constants';
const { BucketVersioningKeyFormat } = VSConst; const { BucketVersioningKeyFormat } = VSConst;
const { FILTER_ACCEPT, FILTER_SKIP, SKIP_NONE } = require('./tools'); import { FILTER_ACCEPT, FILTER_SKIP, SKIP_NONE } from './tools';
const VID_SEP = VSConst.VersionId.Separator; const VID_SEP = VSConst.VersionId.Separator;
const { DbPrefixes } = VSConst; const { DbPrefixes } = VSConst;
@ -27,7 +28,11 @@ class DelimiterMaster extends Delimiter {
* @param {RequestLogger} logger - The logger of the request * @param {RequestLogger} logger - The logger of the request
* @param {String} [vFormat] - versioning key format * @param {String} [vFormat] - versioning key format
*/ */
constructor(parameters, logger, vFormat) { prvKey?: any; // TODO type
prvPHDKey?: any; // TODO type
inReplayPrefix?: any; // TODO type
constructor(parameters: DelimiterParams, logger: any, vFormat: string) {
super(parameters, logger, vFormat); super(parameters, logger, vFormat);
// non-PHD master version or a version whose master is a PHD version // non-PHD master version or a version whose master is a PHD version
this.prvKey = undefined; this.prvKey = undefined;
@ -58,7 +63,7 @@ class DelimiterMaster extends Delimiter {
* @param {String} obj.value - The value of the element * @param {String} obj.value - The value of the element
* @return {number} - indicates if iteration should continue * @return {number} - indicates if iteration should continue
*/ */
filterV0(obj) { filterV0(obj: object): number {
let key = obj.key; let key = obj.key;
const value = obj.value; const value = obj.value;
@ -155,14 +160,14 @@ class DelimiterMaster extends Delimiter {
* @param {String} obj.value - The value of the element * @param {String} obj.value - The value of the element
* @return {number} - indicates if iteration should continue * @return {number} - indicates if iteration should continue
*/ */
filterV1(obj) { filterV1(obj: object): number {
// Filtering master keys in v1 is simply listing the master // Filtering master keys in v1 is simply listing the master
// keys, as the state of version keys do not change the // keys, as the state of version keys do not change the
// result, so we can use Delimiter method directly. // result, so we can use Delimiter method directly.
return super.filter(obj); return super.filter(obj);
} }
skippingBase() { skippingBase(): string {
if (this[this.nextContinueMarker]) { if (this[this.nextContinueMarker]) {
// next marker or next continuation token: // next marker or next continuation token:
// - foo/ : skipping foo/ // - foo/ : skipping foo/
@ -177,14 +182,14 @@ class DelimiterMaster extends Delimiter {
return SKIP_NONE; return SKIP_NONE;
} }
skippingV0() { skippingV0(): string {
if (this.inReplayPrefix) { if (this.inReplayPrefix) {
return DbPrefixes.Replay; return DbPrefixes.Replay;
} }
return this.skippingBase(); return this.skippingBase();
} }
skippingV1() { skippingV1(): string {
const skipTo = this.skippingBase(); const skipTo = this.skippingBase();
if (skipTo === SKIP_NONE) { if (skipTo === SKIP_NONE) {
return SKIP_NONE; return SKIP_NONE;
@ -193,4 +198,4 @@ class DelimiterMaster extends Delimiter {
} }
} }
module.exports = { DelimiterMaster }; export { DelimiterMaster };

View File

@ -1,14 +1,32 @@
'use strict'; // eslint-disable-line strict 'use strict'; // eslint-disable-line strict
const Delimiter = require('./delimiter').Delimiter; import { Delimiter } from './delimiter';
const Version = require('../../versioning/Version').Version; import type { DelimiterParams } from './delimiter';
const VSConst = require('../../versioning/constants').VersioningConstants; import type { MPUParams } from './MPU';
const { inc, FILTER_END, FILTER_ACCEPT, FILTER_SKIP, SKIP_NONE } = import { Version } from '../../versioning/Version';
require('./tools'); import { VersioningConstants as VSConst } from '../../versioning/constants';
import { inc, FILTER_END, FILTER_ACCEPT, FILTER_SKIP, SKIP_NONE } from './tools';
const VID_SEP = VSConst.VersionId.Separator; const VID_SEP = VSConst.VersionId.Separator;
const { DbPrefixes, BucketVersioningKeyFormat } = VSConst; const { DbPrefixes, BucketVersioningKeyFormat } = VSConst;
interface DelimiterVersionsParams extends DelimiterParams {
keyMarker: string; // TODO type
versionIdMarker: any; // TODO type
}
interface DelimiterVersionsResult {
CommonPrefixes: string[];
Versions: any; // TODO type
IsTruncated: boolean,
NextKeyMarker?: any; // TODO type
NextVersionIdMarker?: any; // TODO type
Delimiter: string;
}
/** /**
* Handle object listing with parameters * Handle object listing with parameters
* *
@ -22,7 +40,15 @@ const { DbPrefixes, BucketVersioningKeyFormat } = VSConst;
* @prop {Number} maxKeys - number of keys to list * @prop {Number} maxKeys - number of keys to list
*/ */
class DelimiterVersions extends Delimiter { class DelimiterVersions extends Delimiter {
constructor(parameters, logger, vFormat) {
keyMarker: string;
versionIdMarker: any;
masterKey?: any; // TODO type
masterVersionId?: any; // TODO type
NextVersionIdMarker: any; // TODO type
inReplayPrefix: boolean;
constructor(parameters: DelimiterVersionsParams, logger: any, vFormat: string) {
super(parameters, logger, vFormat); super(parameters, logger, vFormat);
// specific to version listing // specific to version listing
this.keyMarker = parameters.keyMarker; this.keyMarker = parameters.keyMarker;
@ -49,7 +75,7 @@ class DelimiterVersions extends Delimiter {
}[this.vFormat]); }[this.vFormat]);
} }
genMDParamsV0() { genMDParamsV0(): MPUParams {
const params = {}; const params = {};
if (this.parameters.prefix) { if (this.parameters.prefix) {
params.gte = this.parameters.prefix; params.gte = this.parameters.prefix;
@ -73,40 +99,41 @@ class DelimiterVersions extends Delimiter {
return params; return params;
} }
genMDParamsV1() { genMDParamsV1(): MPUParams[] {
// return an array of two listing params sets to ask for // return an array of two listing params sets to ask for
// synchronized listing of M and V ranges // synchronized listing of M and V ranges
const params = [{}, {}]; const mRangeParams: MPUParams = {};
const vRangeParams: MPUParams = {};
if (this.parameters.prefix) { if (this.parameters.prefix) {
params[0].gte = DbPrefixes.Master + this.parameters.prefix; mRangeParams.gte = DbPrefixes.Master + this.parameters.prefix;
params[0].lt = DbPrefixes.Master + inc(this.parameters.prefix); mRangeParams.lt = DbPrefixes.Master + inc(this.parameters.prefix);
params[1].gte = DbPrefixes.Version + this.parameters.prefix; vRangeParams.gte = DbPrefixes.Version + this.parameters.prefix;
params[1].lt = DbPrefixes.Version + inc(this.parameters.prefix); vRangeParams.lt = DbPrefixes.Version + inc(this.parameters.prefix);
} else { } else {
params[0].gte = DbPrefixes.Master; mRangeParams.gte = DbPrefixes.Master;
params[0].lt = inc(DbPrefixes.Master); // stop after the last master key mRangeParams.lt = inc(DbPrefixes.Master); // stop after the last master key
params[1].gte = DbPrefixes.Version; vRangeParams.gte = DbPrefixes.Version;
params[1].lt = inc(DbPrefixes.Version); // stop after the last version key vRangeParams.lt = inc(DbPrefixes.Version); // stop after the last version key
} }
if (this.parameters.keyMarker) { if (this.parameters.keyMarker) {
if (params[1].gte <= DbPrefixes.Version + this.parameters.keyMarker) { if (vRangeParams.gte <= DbPrefixes.Version + this.parameters.keyMarker) {
delete params[0].gte; delete mRangeParams.gte;
delete params[1].gte; delete vRangeParams.gte;
params[0].gt = DbPrefixes.Master + inc(this.parameters.keyMarker + VID_SEP); mRangeParams.gt = DbPrefixes.Master + inc(this.parameters.keyMarker + VID_SEP);
if (this.parameters.versionIdMarker) { if (this.parameters.versionIdMarker) {
// versionIdMarker should always come with keyMarker // versionIdMarker should always come with keyMarker
// but may not be the other way around // but may not be the other way around
params[1].gt = DbPrefixes.Version vRangeParams.gt = DbPrefixes.Version
+ this.parameters.keyMarker + this.parameters.keyMarker
+ VID_SEP + VID_SEP
+ this.parameters.versionIdMarker; + this.parameters.versionIdMarker;
} else { } else {
params[1].gt = DbPrefixes.Version vRangeParams.gt = DbPrefixes.Version
+ inc(this.parameters.keyMarker + VID_SEP); + inc(this.parameters.keyMarker + VID_SEP);
} }
} }
} }
return params; return [mRangeParams, vRangeParams];
} }
/** /**
@ -120,7 +147,7 @@ class DelimiterVersions extends Delimiter {
* * -1 if master key < version key * * -1 if master key < version key
* * 1 if master key > version key * * 1 if master key > version key
*/ */
compareObjects(masterObj, versionObj) { compareObjects(masterObj: object, versionObj: object): number {
const masterKey = masterObj.key.slice(DbPrefixes.Master.length); const masterKey = masterObj.key.slice(DbPrefixes.Master.length);
const versionKey = versionObj.key.slice(DbPrefixes.Version.length); const versionKey = versionObj.key.slice(DbPrefixes.Version.length);
return masterKey < versionKey ? -1 : 1; return masterKey < versionKey ? -1 : 1;
@ -136,7 +163,7 @@ class DelimiterVersions extends Delimiter {
* @param {String} obj.value - The value of the key * @param {String} obj.value - The value of the key
* @return {Boolean} - indicates if iteration should continue * @return {Boolean} - indicates if iteration should continue
*/ */
addContents(obj) { addContents(obj: object): boolean {
if (this._reachedMaxKeys()) { if (this._reachedMaxKeys()) {
return FILTER_END; return FILTER_END;
} }
@ -163,7 +190,7 @@ class DelimiterVersions extends Delimiter {
* @param {String} obj.value - The value of the element * @param {String} obj.value - The value of the element
* @return {number} - indicates if iteration should continue * @return {number} - indicates if iteration should continue
*/ */
filterV0(obj) { filterV0(obj: object): number {
if (obj.key.startsWith(DbPrefixes.Replay)) { if (obj.key.startsWith(DbPrefixes.Replay)) {
this.inReplayPrefix = true; this.inReplayPrefix = true;
return FILTER_SKIP; return FILTER_SKIP;
@ -189,7 +216,7 @@ class DelimiterVersions extends Delimiter {
* @param {String} obj.value - The value of the element * @param {String} obj.value - The value of the element
* @return {number} - indicates if iteration should continue * @return {number} - indicates if iteration should continue
*/ */
filterV1(obj) { filterV1(obj: object): number {
// this function receives both M and V keys, but their prefix // this function receives both M and V keys, but their prefix
// length is the same so we can remove their prefix without // length is the same so we can remove their prefix without
// looking at the type of key // looking at the type of key
@ -197,7 +224,7 @@ class DelimiterVersions extends Delimiter {
obj.value); obj.value);
} }
filterCommon(key, value) { filterCommon(key: string, value: string): boolean {
if (this.prefix && !key.startsWith(this.prefix)) { if (this.prefix && !key.startsWith(this.prefix)) {
return FILTER_SKIP; return FILTER_SKIP;
} }
@ -230,7 +257,7 @@ class DelimiterVersions extends Delimiter {
return this.addContents({ key: nonversionedKey, value, versionId }); return this.addContents({ key: nonversionedKey, value, versionId });
} }
skippingV0() { skippingV0(): string {
if (this.inReplayPrefix) { if (this.inReplayPrefix) {
return DbPrefixes.Replay; return DbPrefixes.Replay;
} }
@ -243,7 +270,7 @@ class DelimiterVersions extends Delimiter {
return SKIP_NONE; return SKIP_NONE;
} }
skippingV1() { skippingV1(): string {
const skipV0 = this.skippingV0(); const skipV0 = this.skippingV0();
if (skipV0 === SKIP_NONE) { if (skipV0 === SKIP_NONE) {
return SKIP_NONE; return SKIP_NONE;
@ -259,7 +286,7 @@ class DelimiterVersions extends Delimiter {
* isn't truncated * isn't truncated
* @return {Object} - following amazon format * @return {Object} - following amazon format
*/ */
result() { result(): DelimiterVersionsResult {
/* NextMarker is only provided when delimiter is used. /* NextMarker is only provided when delimiter is used.
* specified in v1 listing documentation * specified in v1 listing documentation
* http://docs.aws.amazon.com/AmazonS3/latest/API/RESTBucketGET.html * http://docs.aws.amazon.com/AmazonS3/latest/API/RESTBucketGET.html
@ -276,4 +303,4 @@ class DelimiterVersions extends Delimiter {
} }
} }
module.exports = { DelimiterVersions }; export { DelimiterVersions };

View File

@ -1,9 +0,0 @@
module.exports = {
Basic: require('./basic').List,
Delimiter: require('./delimiter').Delimiter,
DelimiterVersions: require('./delimiterVersions')
.DelimiterVersions,
DelimiterMaster: require('./delimiterMaster')
.DelimiterMaster,
MPU: require('./MPU').MultipartUploads,
};

View File

@ -0,0 +1,13 @@
import { List as Basic } from './basic';
import { Delimiter } from './delimiter';
import { DelimiterVersions } from './delimiterVersions';
import { DelimiterMaster } from './delimiterMaster';
import { MultipartUploads as MPU } from './MPU';
export {
Basic,
Delimiter,
DelimiterVersions,
DelimiterMaster,
MPU,
};

View File

@ -1,10 +1,15 @@
const assert = require('assert'); import { strict as assert } from 'assert';
const { FILTER_END, FILTER_SKIP, SKIP_NONE } = require('./tools'); import { FILTER_END, FILTER_SKIP, SKIP_NONE } from './tools';
const MAX_STREAK_LENGTH = 100; const MAX_STREAK_LENGTH = 100;
interface SkipParams {
extension: any;
gte: any;
}
/** /**
* Handle the filtering and the skip mechanism of a listing result. * Handle the filtering and the skip mechanism of a listing result.
*/ */
@ -15,14 +20,23 @@ class Skip {
* @param {String} params.gte - current range gte (greater than or * @param {String} params.gte - current range gte (greater than or
* equal) used by the client code * equal) used by the client code
*/ */
constructor(params) {
extension: any;
gteParams: any;
listingEndCb?: Function;
skipRangeCb?: Function;
streakLength: number;
constructor(params: SkipParams) {
// TODO - once we're in strict TS everywhere, we no longer need these
// assertions
assert(params.extension); assert(params.extension);
this.extension = params.extension; this.extension = params.extension;
this.gteParams = params.gte; this.gteParams = params.gte;
this.listingEndCb = null; this.listingEndCb = undefined;
this.skipRangeCb = null; this.skipRangeCb = undefined;
/* Used to count consecutive FILTER_SKIP returned by the extension /* Used to count consecutive FILTER_SKIP returned by the extension
* filter method. Once this counter reaches MAX_STREAK_LENGTH, the * filter method. Once this counter reaches MAX_STREAK_LENGTH, the
@ -31,11 +45,11 @@ class Skip {
this.streakLength = 0; this.streakLength = 0;
} }
setListingEndCb(cb) { setListingEndCb(cb: Function) {
this.listingEndCb = cb; this.listingEndCb = cb;
} }
setSkipRangeCb(cb) { setSkipRangeCb(cb: Function) {
this.skipRangeCb = cb; this.skipRangeCb = cb;
} }
@ -47,9 +61,9 @@ class Skip {
* This function calls the listing end or the skip range callbacks if * This function calls the listing end or the skip range callbacks if
* needed. * needed.
*/ */
filter(entry) { filter(entry: object): undefined {
assert(this.listingEndCb); assert(this.listingEndCb !== undefined);
assert(this.skipRangeCb); assert(this.skipRangeCb !== undefined);
const filteringResult = this.extension.filter(entry); const filteringResult = this.extension.filter(entry);
const skippingRange = this.extension.skipping(); const skippingRange = this.extension.skipping();
@ -73,7 +87,7 @@ class Skip {
} }
} }
_inc(str) { _inc(str: string): string {
if (!str) { if (!str) {
return str; return str;
} }
@ -84,5 +98,7 @@ class Skip {
} }
} }
export {
module.exports = Skip; Skip,
SkipParams
}

View File

@ -1,4 +1,6 @@
const { DbPrefixes } = require('../../versioning/constants').VersioningConstants; import { VersioningConstants } from '../../versioning/constants';
const DbPrefixes = VersioningConstants.DbPrefixes;
// constants for extensions // constants for extensions
const SKIP_NONE = undefined; // to be inline with the values of NextMarker const SKIP_NONE = undefined; // to be inline with the values of NextMarker
@ -15,8 +17,8 @@ const FILTER_END = -1;
* @param {Number} limit - The limit to respect * @param {Number} limit - The limit to respect
* @return {Number} - The parsed number || limit * @return {Number} - The parsed number || limit
*/ */
function checkLimit(number, limit) { function checkLimit(str: string, limit: number): number {
const parsed = Number.parseInt(number, 10); const parsed = Number.parseInt(str, 10);
const valid = !Number.isNaN(parsed) && (!limit || parsed <= limit); const valid = !Number.isNaN(parsed) && (!limit || parsed <= limit);
return valid ? parsed : limit; return valid ? parsed : limit;
} }
@ -28,7 +30,7 @@ function checkLimit(number, limit) {
* @return {string} - the incremented string * @return {string} - the incremented string
* or the input if it is not valid * or the input if it is not valid
*/ */
function inc(str) { function inc(str: string): string {
return str ? (str.slice(0, str.length - 1) + return str ? (str.slice(0, str.length - 1) +
String.fromCharCode(str.charCodeAt(str.length - 1) + 1)) : str; String.fromCharCode(str.charCodeAt(str.length - 1) + 1)) : str;
} }
@ -40,7 +42,7 @@ function inc(str) {
* @param {object} v0params - listing parameters for v0 format * @param {object} v0params - listing parameters for v0 format
* @return {object} - listing parameters for v1 format * @return {object} - listing parameters for v1 format
*/ */
function listingParamsMasterKeysV0ToV1(v0params) { function listingParamsMasterKeysV0ToV1(v0params: any): any {
const v1params = Object.assign({}, v0params); const v1params = Object.assign({}, v0params);
if (v0params.gt !== undefined) { if (v0params.gt !== undefined) {
v1params.gt = `${DbPrefixes.Master}${v0params.gt}`; v1params.gt = `${DbPrefixes.Master}${v0params.gt}`;
@ -59,7 +61,7 @@ function listingParamsMasterKeysV0ToV1(v0params) {
return v1params; return v1params;
} }
module.exports = { export {
checkLimit, checkLimit,
inc, inc,
listingParamsMasterKeysV0ToV1, listingParamsMasterKeysV0ToV1,

View File

@ -91,7 +91,7 @@ class Vault {
requestContext: serializedRCsArr, requestContext: serializedRCsArr,
}, },
(err, userInfo) => vaultSignatureCb(err, userInfo, (err, userInfo) => vaultSignatureCb(err, userInfo,
params.log, callback) params.log, callback),
); );
} }
@ -146,7 +146,7 @@ class Vault {
requestContext: serializedRCs, requestContext: serializedRCs,
}, },
(err, userInfo) => vaultSignatureCb(err, userInfo, (err, userInfo) => vaultSignatureCb(err, userInfo,
params.log, callback, streamingV4Params) params.log, callback, streamingV4Params),
); );
} }

View File

@ -187,7 +187,7 @@ function generateV4Headers(request, data, accessKey, secretKeyValue,
.filter(headerName => .filter(headerName =>
headerName.startsWith('x-amz-') headerName.startsWith('x-amz-')
|| headerName.startsWith('x-scal-') || headerName.startsWith('x-scal-')
|| headerName === 'host' || headerName === 'host',
).sort().join(';'); ).sort().join(';');
const params = { request, signedHeaders, payloadChecksum, const params = { request, signedHeaders, payloadChecksum,
credentialScope, timestamp, query: data, credentialScope, timestamp, query: data,

View File

@ -29,7 +29,7 @@ class ChainBackend extends BaseBackend {
typeof client.getCanonicalIds === 'function' && typeof client.getCanonicalIds === 'function' &&
typeof client.getEmailAddresses === 'function' && typeof client.getEmailAddresses === 'function' &&
typeof client.checkPolicies === 'function' && typeof client.checkPolicies === 'function' &&
typeof client.healthcheck === 'function' typeof client.healthcheck === 'function',
), 'invalid client: missing required auth backend methods'); ), 'invalid client: missing required auth backend methods');
this._clients = clients; this._clients = clients;
} }
@ -55,7 +55,7 @@ class ChainBackend extends BaseBackend {
signatureFromRequest, signatureFromRequest,
accessKey, accessKey,
options, options,
done done,
), callback); ), callback);
} }
@ -67,7 +67,7 @@ class ChainBackend extends BaseBackend {
region, region,
scopeDate, scopeDate,
options, options,
done done,
), callback); ), callback);
} }
@ -151,7 +151,7 @@ class ChainBackend extends BaseBackend {
requestContextParams, requestContextParams,
userArn, userArn,
options, options,
done done,
), (err, res) => { ), (err, res) => {
if (err) { if (err) {
return callback(err); return callback(err);
@ -169,7 +169,7 @@ class ChainBackend extends BaseBackend {
client.healthcheck(reqUid, (err, res) => done(null, { client.healthcheck(reqUid, (err, res) => done(null, {
error: !!err ? err : null, error: !!err ? err : null,
status: res, status: res,
}) }),
), (err, res) => { ), (err, res) => {
if (err) { if (err) {
return callback(err); return callback(err);

View File

@ -273,7 +273,7 @@ class V4Transform extends Transform {
} }
// get next chunk // get next chunk
return callback(); return callback();
} },
); );
} }
} }

View File

@ -1,5 +1,6 @@
'use strict'; // eslint-disable-line strict 'use strict'; // eslint-disable-line strict
const crypto = require('crypto');
import { createHash } from 'crypto';
// The min value here is to manage further backward compat if we // The min value here is to manage further backward compat if we
// need it // need it
@ -10,7 +11,7 @@ const iamSecurityTokenPattern =
new RegExp(`^[a-f0-9]{${iamSecurityTokenSizeMin},` + new RegExp(`^[a-f0-9]{${iamSecurityTokenSizeMin},` +
`${iamSecurityTokenSizeMax}}$`); `${iamSecurityTokenSizeMax}}$`);
module.exports = { export default {
// info about the iam security token // info about the iam security token
iamSecurityToken: { iamSecurityToken: {
min: iamSecurityTokenSizeMin, min: iamSecurityTokenSizeMin,
@ -92,7 +93,7 @@ module.exports = {
replicationBackends: { aws_s3: true, azure: true, gcp: true }, replicationBackends: { aws_s3: true, azure: true, gcp: true },
// hex digest of sha256 hash of empty string: // hex digest of sha256 hash of empty string:
emptyStringHash: crypto.createHash('sha256') emptyStringHash: createHash('sha256')
.update('', 'binary').digest('hex'), .update('', 'binary').digest('hex'),
mpuMDStoredExternallyBackend: { aws_s3: true, gcp: true }, mpuMDStoredExternallyBackend: { aws_s3: true, gcp: true },
// AWS sets a minimum size limit for parts except for the last part. // AWS sets a minimum size limit for parts except for the last part.

182
lib/db.js
View File

@ -1,182 +0,0 @@
'use strict'; // eslint-disable-line strict
const writeOptions = { sync: true };
/**
* Like Error, but with a property set to true.
* TODO: this is copied from kineticlib, should consolidate with the
* future errors module
*
* Example: instead of:
* const err = new Error("input is not a buffer");
* err.badTypeInput = true;
* throw err;
* use:
* throw propError("badTypeInput", "input is not a buffer");
*
* @param {String} propName - the property name.
* @param {String} message - the Error message.
* @returns {Error} the Error object.
*/
function propError(propName, message) {
const err = new Error(message);
err[propName] = true;
return err;
}
/**
 * Running transaction with multiple updates to be committed atomically.
 *
 * Operations (put/del) and conditions are queued in memory, then applied
 * in a single LevelDB `batch` call when commit() is invoked. A transaction
 * is single-use: once committed, it rejects further ops and commits.
 */
class IndexTransaction {
    /**
     * Builds a new transaction
     *
     * @argument {Leveldb} db an open database to which the updates
     *                        will be applied
     *
     * @returns {IndexTransaction} a new empty transaction
     */
    constructor(db) {
        this.operations = [];
        this.db = db;
        this.closed = false;
        this.conditions = [];
    }

    /**
     * Adds a new operation to participate in this running transaction
     *
     * @argument {object} op an object with the following attributes:
     *                    {
     *                      type: 'put' or 'del',
     *                      key: the object key,
     *                      value: (optional for del) the value to store,
     *                    }
     *
     * @throws {Error} an error described by the following properties
     *                 - invalidTransactionVerb if op is not put or del
     *                 - pushOnCommittedTransaction if already committed
     *                 - missingKey if the key is missing from the op
     *                 - missingValue if putting without a value
     *
     * @returns {undefined}
     */
    push(op) {
        if (this.closed) {
            throw propError('pushOnCommittedTransaction',
                'can not add ops to already committed transaction');
        }
        if (op.type !== 'put' && op.type !== 'del') {
            throw propError('invalidTransactionVerb',
                `unknown action type: ${op.type}`);
        }
        if (op.key === undefined) {
            throw propError('missingKey', 'missing key');
        }
        if (op.type === 'put' && op.value === undefined) {
            throw propError('missingValue', 'missing value');
        }
        this.operations.push(op);
    }

    /**
     * Adds a new put operation to this running transaction
     *
     * @argument {string} key - the key of the object to put
     * @argument {string} value - the value to put
     *
     * @throws {Error} an error described by the following properties
     *                 - pushOnCommittedTransaction if already committed
     *                 - missingKey if the key is missing from the op
     *                 - missingValue if putting without a value
     *
     * @returns {undefined}
     *
     * @see push
     */
    put(key, value) {
        this.push({ type: 'put', key, value });
    }

    /**
     * Adds a new del operation to this running transaction
     *
     * @argument {string} key - the key of the object to delete
     *
     * @throws {Error} an error described by the following properties
     *                 - pushOnCommittedTransaction if already committed
     *                 - missingKey if the key is missing from the op
     *
     * @returns {undefined}
     *
     * @see push
     */
    del(key) {
        this.push({ type: 'del', key });
    }

    /**
     * Adds a condition for the transaction
     *
     * @argument {object} condition an object with the following attributes:
     *                    {
     *                      <condition>: the object key
     *                    }
     *                    example: { notExists: 'key1' }
     *                    (only the `notExists` condition is supported)
     *
     * @throws {Error} an error described by the following properties
     *                 - pushOnCommittedTransaction if already committed
     *                 - missingCondition if the condition is empty
     *                 - unsupportedConditionalOperation if the condition
     *                   is not a supported one
     *
     * @returns {undefined}
     */
    addCondition(condition) {
        if (this.closed) {
            throw propError('pushOnCommittedTransaction',
                'can not add conditions to already committed transaction');
        }
        if (condition === undefined || Object.keys(condition).length === 0) {
            throw propError('missingCondition', 'missing condition for conditional put');
        }
        if (typeof (condition.notExists) !== 'string') {
            throw propError('unsupportedConditionalOperation', 'missing key or supported condition');
        }
        this.conditions.push(condition);
    }

    /**
     * Applies the queued updates in this transaction atomically.
     *
     * @argument {function} cb function to be called when the commit
     *                         finishes, taking an optional error argument
     *
     * @returns {undefined}
     */
    commit(cb) {
        if (this.closed) {
            return cb(propError('alreadyCommitted',
                'transaction was already committed'));
        }
        if (this.operations.length === 0) {
            return cb(propError('emptyTransaction',
                'tried to commit an empty transaction'));
        }
        this.closed = true;
        // Build the write options per commit instead of mutating the
        // shared module-level `writeOptions` object: attaching
        // `conditions` to the shared object leaked this transaction's
        // conditions into every other commit using the same options.
        const options = { sync: true, conditions: this.conditions };
        // The array-of-operations variant of the `batch` method
        // allows passing options such has `sync: true` whereas the
        // chained form does not.
        return this.db.batch(this.operations, options, cb);
    }
}
// Expose the transaction helper to consumers of this module.
module.exports = {
    IndexTransaction,
};

View File

@ -1,4 +1,4 @@
function reshapeExceptionError(error) { export function reshapeExceptionError(error: any) {
const { message, code, stack, name } = error; const { message, code, stack, name } = error;
return { return {
message, message,
@ -7,7 +7,3 @@ function reshapeExceptionError(error) {
name, name,
}; };
} }
module.exports = {
reshapeExceptionError,
};

View File

@ -100,6 +100,8 @@ class ArsenalError extends Error {
const errors = ArsenalError.errorMap const errors = ArsenalError.errorMap
export type { ArsenalError };
export default { export default {
...errors ...errors
}; };

View File

@ -1,6 +1,8 @@
'use strict'; // eslint-disable-line 'use strict'; // eslint-disable-line
const debug = require('util').debuglog('jsutil'); import { debuglog } from 'util';
const debug = debuglog('jsutil');
// JavaScript utility functions // JavaScript utility functions
@ -17,9 +19,9 @@ const debug = require('util').debuglog('jsutil');
* @return {function} a callable wrapper mirroring <tt>func</tt> but * @return {function} a callable wrapper mirroring <tt>func</tt> but
* only calls <tt>func</tt> at first invocation. * only calls <tt>func</tt> at first invocation.
*/ */
module.exports.once = function once(func) { export function once(func: Function): Function {
const state = { called: false, res: undefined }; const state = { called: false, res: undefined };
return function wrapper(...args) { return function wrapper(...args: any) {
if (!state.called) { if (!state.called) {
state.called = true; state.called = true;
state.res = func.apply(func, args); state.res = func.apply(func, args);

View File

@ -17,7 +17,7 @@ class RedisClient {
method: 'RedisClient.constructor', method: 'RedisClient.constructor',
redisHost: config.host, redisHost: config.host,
redisPort: config.port, redisPort: config.port,
}) }),
); );
return this; return this;
} }

View File

@ -9,7 +9,6 @@ const StatsClient = require('./StatsClient');
* rather than by seconds * rather than by seconds
*/ */
class StatsModel extends StatsClient { class StatsModel extends StatsClient {
/** /**
* Utility method to convert 2d array rows to columns, and vice versa * Utility method to convert 2d array rows to columns, and vice versa
* See also: https://docs.ruby-lang.org/en/2.0.0/Array.html#method-i-zip * See also: https://docs.ruby-lang.org/en/2.0.0/Array.html#method-i-zip

View File

@ -1126,7 +1126,7 @@ class LifecycleConfiguration {
`<NoncurrentDays>${noncurrentDays}` + `<NoncurrentDays>${noncurrentDays}` +
'</NoncurrentDays>', '</NoncurrentDays>',
`<StorageClass>${storageClass}</StorageClass>`, `<StorageClass>${storageClass}</StorageClass>`,
`</${actionName}>` `</${actionName}>`,
); );
}); });
Action = xml.join(''); Action = xml.join('');
@ -1146,7 +1146,7 @@ class LifecycleConfiguration {
`<${actionName}>`, `<${actionName}>`,
element, element,
`<StorageClass>${storageClass}</StorageClass>`, `<StorageClass>${storageClass}</StorageClass>`,
`</${actionName}>` `</${actionName}>`,
); );
}); });
Action = xml.join(''); Action = xml.join('');

View File

@ -10,7 +10,6 @@ const ObjectMDLocation = require('./ObjectMDLocation');
* mpuPart metadata for example) * mpuPart metadata for example)
*/ */
class ObjectMD { class ObjectMD {
/** /**
* Create a new instance of ObjectMD. Parameter <tt>objMd</tt> is * Create a new instance of ObjectMD. Parameter <tt>objMd</tt> is
* reserved for internal use, users should call * reserved for internal use, users should call

View File

@ -3,7 +3,6 @@
* 'location' array * 'location' array
*/ */
class ObjectMDLocation { class ObjectMDLocation {
/** /**
* @constructor * @constructor
* @param {object} locationObj - single data location info * @param {object} locationObj - single data location info

View File

@ -10,7 +10,6 @@ const { checkSupportIPv6 } = require('./utils');
class Server { class Server {
/** /**
* @constructor * @constructor
* *

View File

@ -342,8 +342,6 @@ class KMIP {
return cb(null, response); return cb(null, response);
}); });
} }
} }

View File

@ -26,7 +26,7 @@ function sendError(res, log, error, optMessage) {
httpCode: error.code, httpCode: error.code,
errorType: error.message, errorType: error.message,
error: message, error: message,
} },
); );
res.writeHead(error.code); res.writeHead(error.code);
res.end(JSON.stringify({ res.end(JSON.stringify({

View File

@ -45,7 +45,6 @@ function sendError(res, log, error, optMessage) {
* start() to start listening to the configured port. * start() to start listening to the configured port.
*/ */
class RESTServer extends httpServer { class RESTServer extends httpServer {
/** /**
* @constructor * @constructor
* @param {Object} params - constructor params * @param {Object} params - constructor params

View File

@ -17,7 +17,6 @@ const rpc = require('./rpc.js');
* RPC client object accessing the sub-level transparently. * RPC client object accessing the sub-level transparently.
*/ */
class LevelDbClient extends rpc.BaseClient { class LevelDbClient extends rpc.BaseClient {
/** /**
* @constructor * @constructor
* *
@ -78,7 +77,6 @@ class LevelDbClient extends rpc.BaseClient {
* env.subDb (env is passed as first parameter of received RPC calls). * env.subDb (env is passed as first parameter of received RPC calls).
*/ */
class LevelDbService extends rpc.BaseService { class LevelDbService extends rpc.BaseService {
/** /**
* @constructor * @constructor
* *

View File

@ -37,7 +37,6 @@ let streamRPCJSONObj;
* an error occurred). * an error occurred).
*/ */
class BaseClient extends EventEmitter { class BaseClient extends EventEmitter {
/** /**
* @constructor * @constructor
* *
@ -251,7 +250,6 @@ class BaseClient extends EventEmitter {
* *
*/ */
class BaseService { class BaseService {
/** /**
* @constructor * @constructor
* *

View File

@ -1,9 +1,10 @@
'use strict'; // eslint-disable-line strict 'use strict'; // eslint-disable-line strict
const Ajv = require('ajv'); import Ajv from 'ajv';
const userPolicySchema = require('./userPolicySchema'); import * as userPolicySchema from './userPolicySchema.json';
const resourcePolicySchema = require('./resourcePolicySchema'); import * as resourcePolicySchema from './resourcePolicySchema.json';
const errors = require('../errors'); import errors from '../errors';
import type { ArsenalError } from '../errors';
const ajValidate = new Ajv({ allErrors: true }); const ajValidate = new Ajv({ allErrors: true });
ajValidate.addMetaSchema(require('ajv/lib/refs/json-schema-draft-06.json')); ajValidate.addMetaSchema(require('ajv/lib/refs/json-schema-draft-06.json'));
@ -27,7 +28,7 @@ const errDict = {
}; };
// parse ajv errors and return early with the first relevant error // parse ajv errors and return early with the first relevant error
function _parseErrors(ajvErrors, policyType) { function _parseErrors(ajvErrors: Ajv.ErrorObject[], policyType: string) {
let parsedErr; let parsedErr;
if (policyType === 'user') { if (policyType === 'user') {
// deep copy is needed as we have to assign custom error description // deep copy is needed as we have to assign custom error description
@ -67,7 +68,7 @@ function _parseErrors(ajvErrors, policyType) {
} }
// parse JSON safely without throwing an exception // parse JSON safely without throwing an exception
function _safeJSONParse(s) { function _safeJSONParse(s: string): object {
try { try {
return JSON.parse(s); return JSON.parse(s);
} catch (e) { } catch (e) {
@ -75,9 +76,20 @@ function _safeJSONParse(s) {
} }
} }
/**
* @typedef ValidationResult
* @type Object
* @property {Array|null} error - list of validation errors or null
* @property {Bool} valid - true/false depending on the validation result
*/
interface ValidationResult {
error: ArsenalError;
valid: boolean;
}
// validates policy using the validation schema // validates policy using the validation schema
function _validatePolicy(type, policy) { function _validatePolicy(policyType: string, policy: string): ValidationResult {
if (type === 'user') { if (policyType === 'user') {
const parseRes = _safeJSONParse(policy); const parseRes = _safeJSONParse(policy);
if (parseRes instanceof Error) { if (parseRes instanceof Error) {
return { error: Object.assign({}, errors.MalformedPolicyDocument), return { error: Object.assign({}, errors.MalformedPolicyDocument),
@ -90,7 +102,7 @@ function _validatePolicy(type, policy) {
} }
return { error: null, valid: true }; return { error: null, valid: true };
} }
if (type === 'resource') { if (policyType === 'resource') {
const parseRes = _safeJSONParse(policy); const parseRes = _safeJSONParse(policy);
if (parseRes instanceof Error) { if (parseRes instanceof Error) {
return { error: Object.assign({}, errors.MalformedPolicy), return { error: Object.assign({}, errors.MalformedPolicy),
@ -105,19 +117,14 @@ function _validatePolicy(type, policy) {
} }
return { error: errors.NotImplemented, valid: false }; return { error: errors.NotImplemented, valid: false };
} }
/**
* @typedef ValidationResult
* @type Object
* @property {Array|null} error - list of validation errors or null
* @property {Bool} valid - true/false depending on the validation result
*/
/** /**
* Validates user policy * Validates user policy
* @param {String} policy - policy json * @param {String} policy - policy json
* @returns {Object} - returns object with properties error and value * @returns {Object} - returns object with properties error and value
* @returns {ValidationResult} - result of the validation * @returns {ValidationResult} - result of the validation
*/ */
function validateUserPolicy(policy) { function validateUserPolicy(policy: string): ValidationResult {
return _validatePolicy('user', policy); return _validatePolicy('user', policy);
} }
@ -127,11 +134,11 @@ function validateUserPolicy(policy) {
* @returns {Object} - returns object with properties error and value * @returns {Object} - returns object with properties error and value
* @returns {ValidationResult} - result of the validation * @returns {ValidationResult} - result of the validation
*/ */
function validateResourcePolicy(policy) { function validateResourcePolicy(policy: string): ValidationResult {
return _validatePolicy('resource', policy); return _validatePolicy('resource', policy);
} }
module.exports = { export {
validateUserPolicy, validateUserPolicy,
validateResourcePolicy, validateResourcePolicy,
}; };

View File

@ -6,7 +6,6 @@ const crypto = require('crypto');
* data through a stream * data through a stream
*/ */
class MD5Sum extends Transform { class MD5Sum extends Transform {
/** /**
* @constructor * @constructor
*/ */
@ -40,7 +39,6 @@ class MD5Sum extends Transform {
this.emit('hashed'); this.emit('hashed');
callback(null); callback(null);
} }
} }
module.exports = MD5Sum; module.exports = MD5Sum;

View File

@ -121,7 +121,7 @@ log, cb) => {
return cb(errors.BadDigest); return cb(errors.BadDigest);
} }
return cb(errors.InternalError.customizeDescription( return cb(errors.InternalError.customizeDescription(
`Error returned from Azure: ${err.message}`) `Error returned from Azure: ${err.message}`),
); );
} }
const md5 = result.headers['content-md5'] || ''; const md5 = result.headers['content-md5'] || '';

View File

@ -33,7 +33,7 @@ convertMethods.listMultipartUploads = xmlParams => {
xml.push('<?xml version="1.0" encoding="UTF-8"?>', xml.push('<?xml version="1.0" encoding="UTF-8"?>',
'<ListMultipartUploadsResult ' + '<ListMultipartUploadsResult ' +
'xmlns="http://s3.amazonaws.com/doc/2006-03-01/">', 'xmlns="http://s3.amazonaws.com/doc/2006-03-01/">',
`<Bucket>${escapeForXml(xmlParams.bucketName)}</Bucket>` `<Bucket>${escapeForXml(xmlParams.bucketName)}</Bucket>`,
); );
// For certain XML elements, if it is `undefined`, AWS returns either an // For certain XML elements, if it is `undefined`, AWS returns either an
@ -58,7 +58,7 @@ convertMethods.listMultipartUploads = xmlParams => {
}); });
xml.push(`<MaxUploads>${escapeForXml(l.MaxKeys)}</MaxUploads>`, xml.push(`<MaxUploads>${escapeForXml(l.MaxKeys)}</MaxUploads>`,
`<IsTruncated>${escapeForXml(l.IsTruncated)}</IsTruncated>` `<IsTruncated>${escapeForXml(l.IsTruncated)}</IsTruncated>`,
); );
l.Uploads.forEach(upload => { l.Uploads.forEach(upload => {
@ -84,14 +84,14 @@ convertMethods.listMultipartUploads = xmlParams => {
`<StorageClass>${escapeForXml(val.StorageClass)}` + `<StorageClass>${escapeForXml(val.StorageClass)}` +
'</StorageClass>', '</StorageClass>',
`<Initiated>${escapeForXml(val.Initiated)}</Initiated>`, `<Initiated>${escapeForXml(val.Initiated)}</Initiated>`,
'</Upload>' '</Upload>',
); );
}); });
l.CommonPrefixes.forEach(prefix => { l.CommonPrefixes.forEach(prefix => {
xml.push('<CommonPrefixes>', xml.push('<CommonPrefixes>',
`<Prefix>${escapeForXml(prefix)}</Prefix>`, `<Prefix>${escapeForXml(prefix)}</Prefix>`,
'</CommonPrefixes>' '</CommonPrefixes>',
); );
}); });

View File

@ -5,7 +5,6 @@ const Readable = require('stream').Readable;
* This class is used to produce zeros filled buffers for a reader consumption * This class is used to produce zeros filled buffers for a reader consumption
*/ */
class NullStream extends Readable { class NullStream extends Readable {
/** /**
* Construct a new zeros filled buffers producer that will * Construct a new zeros filled buffers producer that will
* produce as much bytes as specified by the range parameter, or the size * produce as much bytes as specified by the range parameter, or the size

View File

@ -118,7 +118,7 @@ const XMLResponseBackend = {
`<Message>${errCode.description}</Message>`, `<Message>${errCode.description}</Message>`,
'<Resource></Resource>', '<Resource></Resource>',
`<RequestId>${log.getSerializedUids()}</RequestId>`, `<RequestId>${log.getSerializedUids()}</RequestId>`,
'</Error>' '</Error>',
); );
const xmlStr = xml.join(''); const xmlStr = xml.join('');
const bytesSent = Buffer.byteLength(xmlStr); const bytesSent = Buffer.byteLength(xmlStr);
@ -377,7 +377,7 @@ function retrieveData(locations, retrieveDataParams, response, log) {
// call end for all cases (error/success) per node.js docs // call end for all cases (error/success) per node.js docs
// recommendation // recommendation
response.end(); response.end();
} },
); );
} }
@ -592,7 +592,7 @@ const routesUtils = {
`<h1>${err.code} ${response.statusMessage}</h1>`, `<h1>${err.code} ${response.statusMessage}</h1>`,
'<ul>', '<ul>',
`<li>Code: ${err.message}</li>`, `<li>Code: ${err.message}</li>`,
`<li>Message: ${err.description}</li>` `<li>Message: ${err.description}</li>`,
); );
if (!userErrorPageFailure && bucketName) { if (!userErrorPageFailure && bucketName) {
@ -602,7 +602,7 @@ const routesUtils = {
`<li>RequestId: ${log.getSerializedUids()}</li>`, `<li>RequestId: ${log.getSerializedUids()}</li>`,
// AWS response contains HostId here. // AWS response contains HostId here.
// TODO: consider adding // TODO: consider adding
'</ul>' '</ul>',
); );
if (userErrorPageFailure) { if (userErrorPageFailure) {
html.push( html.push(
@ -612,13 +612,13 @@ const routesUtils = {
'<ul>', '<ul>',
`<li>Code: ${err.message}</li>`, `<li>Code: ${err.message}</li>`,
`<li>Message: ${err.description}</li>`, `<li>Message: ${err.description}</li>`,
'</ul>' '</ul>',
); );
} }
html.push( html.push(
'<hr/>', '<hr/>',
'</body>', '</body>',
'</html>' '</html>',
); );
return response.end(html.join(''), 'utf8', () => { return response.end(html.join(''), 'utf8', () => {
@ -848,7 +848,7 @@ const routesUtils = {
return bucketName; return bucketName;
} }
throw new Error( throw new Error(
`bad request: hostname ${host} is not in valid endpoints` `bad request: hostname ${host} is not in valid endpoints`,
); );
}, },

View File

@ -1,6 +1,6 @@
'use strict'; // eslint-disable-line strict 'use strict'; // eslint-disable-line strict
const randomBytes = require('crypto').randomBytes; import { randomBytes } from 'crypto';
/* /*
* This set of function allows us to create an efficient shuffle * This set of function allows us to create an efficient shuffle
@ -18,13 +18,13 @@ const randomBytes = require('crypto').randomBytes;
* @return {number} the lowest number of bits * @return {number} the lowest number of bits
* @throws Error if number < 0 * @throws Error if number < 0
*/ */
function bitsNeeded(number) { function bitsNeeded(num: number): number {
if (number < 0) { if (num < 0) {
throw new Error('Input must be greater than or equal to zero'); throw new Error('Input must be greater than or equal to zero');
} else if (number === 0) { } else if (num === 0) {
return 1; return 1;
} else { } else {
return Math.floor(Math.log2(number)) + 1; return Math.floor(Math.log2(num)) + 1;
} }
} }
@ -36,7 +36,7 @@ function bitsNeeded(number) {
* if numbits === 0 * if numbits === 0
* @throws Error if numBits < 0 * @throws Error if numBits < 0
*/ */
function createMaskOnes(numBits) { function createMaskOnes(numBits: number): number {
if (numBits < 0) { if (numBits < 0) {
throw new Error('Input must be greater than or equal to zero'); throw new Error('Input must be greater than or equal to zero');
} }
@ -50,7 +50,7 @@ function createMaskOnes(numBits) {
* @return {buffer} a InRangebuffer with 'howMany' pseudo-random bytes. * @return {buffer} a InRangebuffer with 'howMany' pseudo-random bytes.
* @throws Error if numBytes < 0 or if insufficient entropy * @throws Error if numBytes < 0 or if insufficient entropy
*/ */
function nextBytes(numBytes) { function nextBytes(numBytes: number): Buffer {
if (numBytes < 0) { if (numBytes < 0) {
throw new Error('Input must be greater than or equal to zero'); throw new Error('Input must be greater than or equal to zero');
} }
@ -67,7 +67,7 @@ function nextBytes(numBytes) {
* @return {number} the number of bytes needed * @return {number} the number of bytes needed
* @throws Error if numBits < 0 * @throws Error if numBits < 0
*/ */
function bitsToBytes(numBits) { function bitsToBytes(numBits: number): number {
if (numBits < 0) { if (numBits < 0) {
throw new Error('Input must be greater than or equal to zero'); throw new Error('Input must be greater than or equal to zero');
} }
@ -83,7 +83,7 @@ function bitsToBytes(numBits) {
* @return {number} - a pseudo-random integer in [min,max], undefined if * @return {number} - a pseudo-random integer in [min,max], undefined if
* min >= max * min >= max
*/ */
function randomRange(min, max) { function randomRange(min: number, max: number): number {
if (max < min) { if (max < min) {
throw new Error('Invalid range'); throw new Error('Invalid range');
} }
@ -98,7 +98,7 @@ function randomRange(min, max) {
// we use a mask as an optimization: it increases the chances for the // we use a mask as an optimization: it increases the chances for the
// candidate to be in range // candidate to be in range
const mask = createMaskOnes(bits); const mask = createMaskOnes(bits);
let candidate; let candidate: number;
do { do {
candidate = parseInt(nextBytes(bytes).toString('hex'), 16) & mask; candidate = parseInt(nextBytes(bytes).toString('hex'), 16) & mask;
} while (candidate > range); } while (candidate > range);
@ -111,7 +111,7 @@ function randomRange(min, max) {
* @param {Array} array - Any type of array * @param {Array} array - Any type of array
* @return {Array} - The sorted array * @return {Array} - The sorted array
*/ */
module.exports = function shuffle(array) { export default function shuffle<T>(array: T[]): T[] {
for (let i = array.length - 1; i > 0; i--) { for (let i = array.length - 1; i > 0; i--) {
const randIndex = randomRange(0, i); const randIndex = randomRange(0, i);
/* eslint-disable no-param-reassign */ /* eslint-disable no-param-reassign */

View File

@ -11,7 +11,7 @@ const { createLogger, logHelper, removeQuotes, trimXMetaPrefix } =
const missingVerIdInternalError = errors.InternalError.customizeDescription( const missingVerIdInternalError = errors.InternalError.customizeDescription(
'Invalid state. Please ensure versioning is enabled ' + 'Invalid state. Please ensure versioning is enabled ' +
'in AWS for the location constraint and try again.' 'in AWS for the location constraint and try again.',
); );
class AwsClient { class AwsClient {
@ -94,7 +94,7 @@ class AwsClient {
err, this._dataStoreName, this.clientType); err, this._dataStoreName, this.clientType);
return callback(errors.ServiceUnavailable return callback(errors.ServiceUnavailable
.customizeDescription('Error returned from ' + .customizeDescription('Error returned from ' +
`${this.type}: ${err.message}`) `${this.type}: ${err.message}`),
); );
} }
if (!data.VersionId && this._supportsVersioning) { if (!data.VersionId && this._supportsVersioning) {
@ -233,7 +233,7 @@ class AwsClient {
} }
return callback(errors.ServiceUnavailable return callback(errors.ServiceUnavailable
.customizeDescription('Error returned from ' + .customizeDescription('Error returned from ' +
`${this.type}: ${err.message}`) `${this.type}: ${err.message}`),
); );
} }
return callback(); return callback();
@ -307,7 +307,7 @@ class AwsClient {
err, this._dataStoreName, this.clientType); err, this._dataStoreName, this.clientType);
return callback(errors.ServiceUnavailable return callback(errors.ServiceUnavailable
.customizeDescription('Error returned from ' + .customizeDescription('Error returned from ' +
`${this.type}: ${err.message}`) `${this.type}: ${err.message}`),
); );
} }
return callback(null, mpuResObj); return callback(null, mpuResObj);
@ -335,7 +335,7 @@ class AwsClient {
'on uploadPart', err, this._dataStoreName, this.clientType); 'on uploadPart', err, this._dataStoreName, this.clientType);
return callback(errors.ServiceUnavailable return callback(errors.ServiceUnavailable
.customizeDescription('Error returned from ' + .customizeDescription('Error returned from ' +
`${this.type}: ${err.message}`) `${this.type}: ${err.message}`),
); );
} }
// Because we manually add quotes to ETag later, remove quotes here // Because we manually add quotes to ETag later, remove quotes here
@ -363,7 +363,7 @@ class AwsClient {
err, this._dataStoreName, this.clientType); err, this._dataStoreName, this.clientType);
return callback(errors.ServiceUnavailable return callback(errors.ServiceUnavailable
.customizeDescription('Error returned from ' + .customizeDescription('Error returned from ' +
`${this.type}: ${err.message}`) `${this.type}: ${err.message}`),
); );
} }
// build storedParts object to mimic Scality S3 backend returns // build storedParts object to mimic Scality S3 backend returns
@ -435,7 +435,7 @@ class AwsClient {
'completeMPU', err, this._dataStoreName, this.clientType); 'completeMPU', err, this._dataStoreName, this.clientType);
return callback(errors.ServiceUnavailable return callback(errors.ServiceUnavailable
.customizeDescription('Error returned from ' + .customizeDescription('Error returned from ' +
`${this.type}: ${err.message}`) `${this.type}: ${err.message}`),
); );
} }
if (!completeMpuRes.VersionId && this._supportsVersioning) { if (!completeMpuRes.VersionId && this._supportsVersioning) {
@ -453,7 +453,7 @@ class AwsClient {
'headObject', err, this._dataStoreName, this.clientType); 'headObject', err, this._dataStoreName, this.clientType);
return callback(errors.ServiceUnavailable return callback(errors.ServiceUnavailable
.customizeDescription('Error returned from ' + .customizeDescription('Error returned from ' +
`${this.type}: ${err.message}`) `${this.type}: ${err.message}`),
); );
} }
// remove quotes from eTag because they're added later // remove quotes from eTag because they're added later
@ -481,7 +481,7 @@ class AwsClient {
this.clientType); this.clientType);
return callback(errors.ServiceUnavailable return callback(errors.ServiceUnavailable
.customizeDescription('Error returned from ' + .customizeDescription('Error returned from ' +
`${this.type}: ${err.message}`) `${this.type}: ${err.message}`),
); );
} }
return callback(); return callback();
@ -510,7 +510,7 @@ class AwsClient {
this._dataStoreName, this.clientType); this._dataStoreName, this.clientType);
return callback(errors.ServiceUnavailable return callback(errors.ServiceUnavailable
.customizeDescription('Error returned from ' + .customizeDescription('Error returned from ' +
`${this.type}: ${err.message}`) `${this.type}: ${err.message}`),
); );
} }
return callback(); return callback();
@ -533,7 +533,7 @@ class AwsClient {
this._dataStoreName, this.clientType); this._dataStoreName, this.clientType);
return callback(errors.ServiceUnavailable return callback(errors.ServiceUnavailable
.customizeDescription('Error returned from ' + .customizeDescription('Error returned from ' +
`${this.type}: ${err.message}`) `${this.type}: ${err.message}`),
); );
} }
return callback(); return callback();
@ -570,14 +570,14 @@ class AwsClient {
this._dataStoreName, this.clientType); this._dataStoreName, this.clientType);
return callback(errors.AccessDenied return callback(errors.AccessDenied
.customizeDescription('Error: Unable to access ' + .customizeDescription('Error: Unable to access ' +
`${sourceAwsBucketName} ${this.type} bucket`) `${sourceAwsBucketName} ${this.type} bucket`),
); );
} }
logHelper(log, 'error', 'error from data backend on ' + logHelper(log, 'error', 'error from data backend on ' +
'copyObject', err, this._dataStoreName, this.clientType); 'copyObject', err, this._dataStoreName, this.clientType);
return callback(errors.ServiceUnavailable return callback(errors.ServiceUnavailable
.customizeDescription('Error returned from ' + .customizeDescription('Error returned from ' +
`${this.type}: ${err.message}`) `${this.type}: ${err.message}`),
); );
} }
if (!copyResult.VersionId && this._supportsVersioning) { if (!copyResult.VersionId && this._supportsVersioning) {
@ -629,14 +629,14 @@ class AwsClient {
this._dataStoreName, this.clientType); this._dataStoreName, this.clientType);
return callback(errors.AccessDenied return callback(errors.AccessDenied
.customizeDescription('Error: Unable to access ' + .customizeDescription('Error: Unable to access ' +
`${sourceAwsBucketName} AWS bucket`) `${sourceAwsBucketName} AWS bucket`),
); );
} }
logHelper(log, 'error', 'error from data backend on ' + logHelper(log, 'error', 'error from data backend on ' +
'uploadPartCopy', err, this._dataStoreName, this.clientType); 'uploadPartCopy', err, this._dataStoreName, this.clientType);
return callback(errors.ServiceUnavailable return callback(errors.ServiceUnavailable
.customizeDescription('Error returned from ' + .customizeDescription('Error returned from ' +
`${this.type}: ${err.message}`) `${this.type}: ${err.message}`),
); );
} }
const eTag = removeQuotes(res.CopyPartResult.ETag); const eTag = removeQuotes(res.CopyPartResult.ETag);

View File

@ -422,14 +422,14 @@ class AzureClient {
this._dataStoreName); this._dataStoreName);
return callback(errors.AccessDenied return callback(errors.AccessDenied
.customizeDescription('Error: Unable to access ' + .customizeDescription('Error: Unable to access ' +
`${sourceContainerName} Azure Container`) `${sourceContainerName} Azure Container`),
); );
} }
logHelper(log, 'error', 'error from data backend on ' + logHelper(log, 'error', 'error from data backend on ' +
'copyObject', err, this._dataStoreName); 'copyObject', err, this._dataStoreName);
return callback(errors.ServiceUnavailable return callback(errors.ServiceUnavailable
.customizeDescription('Error returned from ' + .customizeDescription('Error returned from ' +
`AWS: ${err.message}`) `AWS: ${err.message}`),
); );
} }
if (res.copy.status === 'pending') { if (res.copy.status === 'pending') {
@ -443,12 +443,12 @@ class AzureClient {
'on abortCopyBlob', err, this._dataStoreName); 'on abortCopyBlob', err, this._dataStoreName);
return callback(errors.ServiceUnavailable return callback(errors.ServiceUnavailable
.customizeDescription('Error returned from ' + .customizeDescription('Error returned from ' +
`AWS on abortCopyBlob: ${err.message}`) `AWS on abortCopyBlob: ${err.message}`),
); );
} }
return callback(errors.InvalidObjectState return callback(errors.InvalidObjectState
.customizeDescription('Error: Azure copy status was ' + .customizeDescription('Error: Azure copy status was ' +
'pending. It has been aborted successfully') 'pending. It has been aborted successfully'),
); );
}); });
} }

View File

@ -123,7 +123,7 @@ class GcpClient extends AwsClient {
err, this._dataStoreName, this.clientType); err, this._dataStoreName, this.clientType);
return callback(errors.ServiceUnavailable return callback(errors.ServiceUnavailable
.customizeDescription('Error returned from ' + .customizeDescription('Error returned from ' +
`GCP: ${err.message}`) `GCP: ${err.message}`),
); );
} }
return callback(null, mpuResObj); return callback(null, mpuResObj);
@ -168,7 +168,7 @@ class GcpClient extends AwsClient {
'completeMPU', err, this._dataStoreName, this.clientType); 'completeMPU', err, this._dataStoreName, this.clientType);
return callback(errors.ServiceUnavailable return callback(errors.ServiceUnavailable
.customizeDescription('Error returned from ' + .customizeDescription('Error returned from ' +
`GCP: ${err.message}`) `GCP: ${err.message}`),
); );
} }
if (!completeMpuRes.VersionId) { if (!completeMpuRes.VersionId) {
@ -210,7 +210,7 @@ class GcpClient extends AwsClient {
'on uploadPart', err, this._dataStoreName, this.clientType); 'on uploadPart', err, this._dataStoreName, this.clientType);
return callback(errors.ServiceUnavailable return callback(errors.ServiceUnavailable
.customizeDescription('Error returned from ' + .customizeDescription('Error returned from ' +
`GCP: ${err.message}`) `GCP: ${err.message}`),
); );
} }
// remove quotes from eTag because they're added later // remove quotes from eTag because they're added later
@ -242,7 +242,7 @@ class GcpClient extends AwsClient {
if (copySourceRange) { if (copySourceRange) {
return callback(errors.NotImplemented return callback(errors.NotImplemented
.customizeDescription('Error returned from ' + .customizeDescription('Error returned from ' +
`${this.clientType}: copySourceRange not implemented`) `${this.clientType}: copySourceRange not implemented`),
); );
} }
@ -261,14 +261,14 @@ class GcpClient extends AwsClient {
this._dataStoreName, this.clientType); this._dataStoreName, this.clientType);
return callback(errors.AccessDenied return callback(errors.AccessDenied
.customizeDescription('Error: Unable to access ' + .customizeDescription('Error: Unable to access ' +
`${sourceGcpBucketName} GCP bucket`) `${sourceGcpBucketName} GCP bucket`),
); );
} }
logHelper(log, 'error', 'error from data backend on ' + logHelper(log, 'error', 'error from data backend on ' +
'uploadPartCopy', err, this._dataStoreName); 'uploadPartCopy', err, this._dataStoreName);
return callback(errors.ServiceUnavailable return callback(errors.ServiceUnavailable
.customizeDescription('Error returned from ' + .customizeDescription('Error returned from ' +
`GCP: ${err.message}`) `GCP: ${err.message}`),
); );
} }
// remove quotes from eTag because they're added later // remove quotes from eTag because they're added later
@ -291,7 +291,7 @@ class GcpClient extends AwsClient {
'on abortMPU', err, this._dataStoreName, this.clientType); 'on abortMPU', err, this._dataStoreName, this.clientType);
return callback(errors.ServiceUnavailable return callback(errors.ServiceUnavailable
.customizeDescription('Error returned from ' + .customizeDescription('Error returned from ' +
`GCP: ${err.message}`) `GCP: ${err.message}`),
); );
} }
return callback(); return callback();

View File

@ -45,7 +45,7 @@ class PfsClient {
} }
return callback(null, keyContext.objectKey, '', return callback(null, keyContext.objectKey, '',
keyContext.metaHeaders['x-amz-meta-size'], keyContext.metaHeaders['x-amz-meta-size'],
md5 md5,
); );
} }
logHelper(log, 'error', 'Not implemented', errors.NotImplemented, logHelper(log, 'error', 'Not implemented', errors.NotImplemented,

View File

@ -35,7 +35,6 @@ const FOLDER_HASH = 3511;
* directory hash structure under the configured dataPath. * directory hash structure under the configured dataPath.
*/ */
class DataFileStore { class DataFileStore {
/** /**
* @constructor * @constructor
* @param {Object} dataConfig - configuration of the file backend * @param {Object} dataConfig - configuration of the file backend

View File

@ -30,7 +30,6 @@ class ListRecordStream extends stream.Transform {
* @classdesc Proxy object to access raft log API * @classdesc Proxy object to access raft log API
*/ */
class LogConsumer { class LogConsumer {
/** /**
* @constructor * @constructor
* *

View File

@ -14,13 +14,13 @@ const _operatorType1 = joi.string().valid(
'$gt', '$gt',
'$gte', '$gte',
'$lt', '$lt',
'$lte' '$lte',
); );
// supports strings, numbers, and boolean // supports strings, numbers, and boolean
const _operatorType2 = joi.string().valid( const _operatorType2 = joi.string().valid(
'$eq', '$eq',
'$ne' '$ne',
); );
const _valueType1 = joi.alternatives([ const _valueType1 = joi.alternatives([

View File

@ -17,7 +17,6 @@ const METASTORE = '__metastore';
const itemScanRefreshDelay = 1000 * 30 * 60; // 30 minutes const itemScanRefreshDelay = 1000 * 30 * 60; // 30 minutes
class BucketFileInterface { class BucketFileInterface {
/** /**
* @constructor * @constructor
* @param {object} [params] - constructor params * @param {object} [params] - constructor params

View File

@ -8,7 +8,6 @@ const { RecordLogProxy } = require('./RecordLog.js');
const werelogs = require('werelogs'); const werelogs = require('werelogs');
class MetadataFileClient { class MetadataFileClient {
/** /**
* Construct a metadata client * Construct a metadata client
* *

View File

@ -25,7 +25,6 @@ const SYNC_OPTIONS = { sync: true };
const SUBLEVEL_SEP = '::'; const SUBLEVEL_SEP = '::';
class MetadataFileServer { class MetadataFileServer {
/** /**
* Construct a metadata server * Construct a metadata server
* *

View File

@ -18,7 +18,6 @@ const DEFAULT_RECORD_LOG_NAME = 's3-recordlog';
* object. * object.
*/ */
class RecordLogProxy extends rpc.BaseClient { class RecordLogProxy extends rpc.BaseClient {
constructor(params) { constructor(params) {
super(params); super(params);
@ -102,7 +101,6 @@ class ListRecordStream extends stream.Transform {
* updates can be transactional with each other. * updates can be transactional with each other.
*/ */
class RecordLogService extends rpc.BaseService { class RecordLogService extends rpc.BaseService {
/** /**
* @constructor * @constructor
* *

View File

@ -9,7 +9,6 @@ const MongoUtils = require('./utils');
* @classdesc Class to consume mongo oplog * @classdesc Class to consume mongo oplog
*/ */
class LogConsumer { class LogConsumer {
/** /**
* @constructor * @constructor
* *

View File

@ -538,7 +538,7 @@ class MongoClientInterface {
updateOne: { updateOne: {
// eslint-disable-next-line // eslint-disable-next-line
filter: { filter: {
_id: objName, '_id': objName,
'value.versionId': params.versionId, 'value.versionId': params.versionId,
}, },
update: { update: {
@ -607,7 +607,7 @@ class MongoClientInterface {
MongoUtils.serialize(mstObjVal); MongoUtils.serialize(mstObjVal);
// eslint-disable-next-line // eslint-disable-next-line
c.update({ c.update({
_id: objName, '_id': objName,
'value.versionId': { 'value.versionId': {
// We break the semantic correctness here with // We break the semantic correctness here with
// $gte instead of $gt because we do not have // $gte instead of $gt because we do not have
@ -760,7 +760,7 @@ class MongoClientInterface {
MongoUtils.serialize(objVal); MongoUtils.serialize(objVal);
// eslint-disable-next-line // eslint-disable-next-line
c.findOneAndReplace({ c.findOneAndReplace({
_id: objName, '_id': objName,
'value.isPHD': true, 'value.isPHD': true,
'value.versionId': mst.versionId, 'value.versionId': mst.versionId,
}, { }, {
@ -822,7 +822,7 @@ class MongoClientInterface {
// version: // version:
// eslint-disable-next-line // eslint-disable-next-line
c.findOneAndDelete({ c.findOneAndDelete({
_id: objName, '_id': objName,
'value.isPHD': true, 'value.isPHD': true,
'value.versionId': mst.versionId, 'value.versionId': mst.versionId,
}, {}, err => { }, {}, err => {
@ -1616,7 +1616,7 @@ class MongoClientInterface {
const retResult = this._handleResults(collRes, isVer); const retResult = this._handleResults(collRes, isVer);
retResult.stalled = stalledCount; retResult.stalled = stalledCount;
return callback(null, retResult); return callback(null, retResult);
} },
); );
} }

View File

@ -11,7 +11,6 @@ const requiresOneWorker = {
}; };
class Server { class Server {
/** /**
* Create a new Metadata Proxy Server instance * Create a new Metadata Proxy Server instance
* *

View File

@ -6,7 +6,7 @@
* @param {String} str - The string to compute the hash * @param {String} str - The string to compute the hash
* @return {Number} The computed hash * @return {Number} The computed hash
*/ */
function stringHash(str) { function stringHash(str: string): number {
let hash = 5381; let hash = 5381;
let i = str.length; let i = str.length;
@ -22,4 +22,4 @@ function stringHash(str) {
return hash >>> 0; return hash >>> 0;
} }
module.exports = stringHash; export default stringHash;

View File

@ -160,7 +160,7 @@ class TestMatrix {
const result = Object.keys(matrixChild.params) const result = Object.keys(matrixChild.params)
.every(currentKey => .every(currentKey =>
Object.prototype.toString.call( Object.prototype.toString.call(
matrixChild.params[currentKey] matrixChild.params[currentKey],
).indexOf('Array') === -1); ).indexOf('Array') === -1);
if (result === true) { if (result === true) {

View File

@ -1,6 +1,16 @@
'use strict'; // eslint-disable-line strict 'use strict'; // eslint-disable-line strict
const VID_SEP = require('./constants').VersioningConstants.VersionId.Separator; import { VersioningConstants } from './constants';
const VID_SEP = VersioningConstants.VersionId.Separator;
interface VersionContents {
isNull: boolean;
isDeleteMarker: boolean;
versionId: string;
otherInfo: any; // TODO type
}
/** /**
* Class for manipulating an object version. * Class for manipulating an object version.
@ -12,6 +22,9 @@ const VID_SEP = require('./constants').VersioningConstants.VersionId.Separator;
* use with a production setup with Metadata). * use with a production setup with Metadata).
*/ */
class Version { class Version {
version: VersionContents; // TODO type
/** /**
* Create a new version instantiation from its data object. * Create a new version instantiation from its data object.
* @param {object} version - the data object to instantiate * @param {object} version - the data object to instantiate
@ -20,7 +33,7 @@ class Version {
* @param {string} version.versionId - the version id * @param {string} version.versionId - the version id
* @constructor * @constructor
*/ */
constructor(version) { constructor(version: VersionContents) {
this.version = version || {}; this.version = version || {};
} }
@ -30,7 +43,7 @@ class Version {
* @param {string} value - the string to parse * @param {string} value - the string to parse
* @return {Version} - the version deserialized from the input string * @return {Version} - the version deserialized from the input string
*/ */
static from(value) { static from(value: string): Version {
return new Version(value ? JSON.parse(value) : undefined); return new Version(value ? JSON.parse(value) : undefined);
} }
@ -40,7 +53,7 @@ class Version {
* @param {string} value - version to check * @param {string} value - version to check
* @return {boolean} - whether this is a PHD version * @return {boolean} - whether this is a PHD version
*/ */
static isPHD(value) { static isPHD(value: string): boolean {
// check if the input is a valid version // check if the input is a valid version
if (!value) { if (!value) {
return false; return false;
@ -67,7 +80,7 @@ class Version {
* @param {string} versionId - versionId of the PHD version * @param {string} versionId - versionId of the PHD version
* @return {string} - the serialized version * @return {string} - the serialized version
*/ */
static generatePHDVersion(versionId) { static generatePHDVersion(versionId: string): string {
return `{ "isPHD": true, "versionId": "${versionId}" }`; return `{ "isPHD": true, "versionId": "${versionId}" }`;
} }
@ -79,7 +92,7 @@ class Version {
* @param {string} versionId - the versionId to append * @param {string} versionId - the versionId to append
* @return {string} - the object with versionId appended * @return {string} - the object with versionId appended
*/ */
static appendVersionId(value, versionId) { static appendVersionId(value: string, versionId: string): string {
// assuming value has the format of '{...}' // assuming value has the format of '{...}'
let index = value.length - 2; let index = value.length - 2;
while (value.charAt(index--) === ' '); while (value.charAt(index--) === ' ');
@ -93,7 +106,7 @@ class Version {
* *
* @return {boolean} - whether this is a PHD version * @return {boolean} - whether this is a PHD version
*/ */
isPHDVersion() { isPHDVersion(): boolean {
return this.version.isPHD || false; return this.version.isPHD || false;
} }
@ -102,7 +115,7 @@ class Version {
* *
* @return {boolean} - stating if the value is a null version * @return {boolean} - stating if the value is a null version
*/ */
isNullVersion() { isNullVersion(): boolean {
return this.version.isNull; return this.version.isNull;
} }
@ -112,7 +125,7 @@ class Version {
* @param {string} value - the stringified object to check * @param {string} value - the stringified object to check
* @return {boolean} - if the object is a delete marker * @return {boolean} - if the object is a delete marker
*/ */
static isDeleteMarker(value) { static isDeleteMarker(value: string): boolean {
const index = value.indexOf('isDeleteMarker'); const index = value.indexOf('isDeleteMarker');
if (index < 0) { if (index < 0) {
return false; return false;
@ -130,7 +143,7 @@ class Version {
* *
* @return {boolean} - stating if the value is a delete marker * @return {boolean} - stating if the value is a delete marker
*/ */
isDeleteMarkerVersion() { isDeleteMarkerVersion(): boolean {
return this.version.isDeleteMarker; return this.version.isDeleteMarker;
} }
@ -139,7 +152,7 @@ class Version {
* *
* @return {string} - the versionId * @return {string} - the versionId
*/ */
getVersionId() { getVersionId(): string {
return this.version.versionId; return this.version.versionId;
} }
@ -149,7 +162,7 @@ class Version {
* @param {string} versionId - the versionId * @param {string} versionId - the versionId
* @return {Version} - the updated version * @return {Version} - the updated version
*/ */
setVersionId(versionId) { setVersionId(versionId: string): Version {
this.version.versionId = versionId; this.version.versionId = versionId;
return this; return this;
} }
@ -159,7 +172,7 @@ class Version {
* *
* @return {Version} - the updated version * @return {Version} - the updated version
*/ */
setDeleteMarker() { setDeleteMarker(): Version {
this.version.isDeleteMarker = true; this.version.isDeleteMarker = true;
return this; return this;
} }
@ -169,7 +182,7 @@ class Version {
* *
* @return {Version} - the updated version * @return {Version} - the updated version
*/ */
setNullVersion() { setNullVersion(): Version {
this.version.isNull = true; this.version.isNull = true;
return this; return this;
} }
@ -179,14 +192,15 @@ class Version {
* *
* @return {string} - the serialized version * @return {string} - the serialized version
*/ */
toString() { toString(): string {
return JSON.stringify(this.version); return JSON.stringify(this.version);
} }
} }
// TODO type key can be array, str, ...
function isMasterKey(key) { function isMasterKey(key) {
return !key.includes(VID_SEP); return !key.includes(VID_SEP);
} }
module.exports = { Version, isMasterKey }; export { Version, isMasterKey };

View File

@ -6,9 +6,11 @@
// - rep_group_id 07 bytes replication group identifier // - rep_group_id 07 bytes replication group identifier
// - other_information arbitrary user input, such as a unique string // - other_information arbitrary user input, such as a unique string
const base62Integer = require('base62'); import * as base62 from 'base62';
const BASE62 = '0123456789abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ'; const BASE62 = '0123456789abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ';
const base62String = require('base-x')(BASE62); import base from 'base-x'
const base62String = base(BASE62);
// the lengths of the components in bytes // the lengths of the components in bytes
const LENGTH_TS = 14; // timestamp: epoch in ms const LENGTH_TS = 14; // timestamp: epoch in ms
@ -178,8 +180,8 @@ function base62Encode(str) {
const part1 = Number(str.substring(0, B62V_HALF)); const part1 = Number(str.substring(0, B62V_HALF));
const part2 = Number(str.substring(B62V_HALF, B62V_TOTAL)); const part2 = Number(str.substring(B62V_HALF, B62V_TOTAL));
const part3 = Buffer.from(str.substring(B62V_TOTAL)); const part3 = Buffer.from(str.substring(B62V_TOTAL));
const enc1 = base62Integer.encode(part1); const enc1 = base62.encode(part1);
const enc2 = base62Integer.encode(part2); const enc2 = base62.encode(part2);
const enc3 = base62String.encode(part3); const enc3 = base62String.encode(part3);
return (B62V_EPAD + enc1).slice(-B62V_EPAD.length) + return (B62V_EPAD + enc1).slice(-B62V_EPAD.length) +
(B62V_EPAD + enc2).slice(-B62V_EPAD.length) + (B62V_EPAD + enc2).slice(-B62V_EPAD.length) +
@ -197,10 +199,10 @@ function base62Decode(str) {
try { try {
let start = 0; let start = 0;
const enc1 = str.substring(start, start + B62V_EPAD.length); const enc1 = str.substring(start, start + B62V_EPAD.length);
const orig1 = base62Integer.decode(enc1); const orig1 = base62.decode(enc1);
start += B62V_EPAD.length; start += B62V_EPAD.length;
const enc2 = str.substring(start, start + B62V_EPAD.length); const enc2 = str.substring(start, start + B62V_EPAD.length);
const orig2 = base62Integer.decode(enc2); const orig2 = base62.decode(enc2);
start += B62V_EPAD.length; start += B62V_EPAD.length;
const enc3 = str.substring(start); const enc3 = str.substring(start);
const orig3 = base62String.decode(enc3); const orig3 = base62String.decode(enc3);
@ -246,8 +248,15 @@ function decode(str) {
return hexDecode(str); return hexDecode(str);
} }
module.exports = { generateVersionId, getInfVid, export {
hexEncode, hexDecode, generateVersionId,
base62Encode, base62Decode, getInfVid,
encode, decode, hexEncode,
ENC_TYPE_HEX, ENC_TYPE_BASE62 }; hexDecode,
base62Encode,
base62Decode,
encode,
decode,
ENC_TYPE_HEX,
ENC_TYPE_BASE62
};

View File

@ -1,10 +1,12 @@
const errors = require('../errors'); import errors from '../errors';
const Version = require('./Version').Version; import { Version } from './Version';
import type WriteCache from './WriteCache';
const genVID = require('./VersionID').generateVersionId; import type WriteGatheringManager from './WriteGatheringManager';
import { generateVersionId as genVID } from './VersionID';
// some predefined constants // some predefined constants
const VID_SEP = require('./constants').VersioningConstants.VersionId.Separator; import { VersioningConstants } from './constants';
const VID_SEP = VersioningConstants.VersionId.Separator;
/** /**
* Increment the charCode of the last character of a valid string. * Increment the charCode of the last character of a valid string.
@ -12,7 +14,7 @@ const VID_SEP = require('./constants').VersioningConstants.VersionId.Separator;
* @param {string} prefix - the input string * @param {string} prefix - the input string
* @return {string} - the incremented string, or the input if it is not valid * @return {string} - the incremented string, or the input if it is not valid
*/ */
function getPrefixUpperBoundary(prefix) { function getPrefixUpperBoundary(prefix: string): string {
if (prefix) { if (prefix) {
return prefix.slice(0, prefix.length - 1) + return prefix.slice(0, prefix.length - 1) +
String.fromCharCode(prefix.charCodeAt(prefix.length - 1) + 1); String.fromCharCode(prefix.charCodeAt(prefix.length - 1) + 1);
@ -20,18 +22,34 @@ function getPrefixUpperBoundary(prefix) {
return prefix; return prefix;
} }
function formatVersionKey(key, versionId) { function formatVersionKey(key: string, versionId: string): string {
return `${key}${VID_SEP}${versionId}`; return `${key}${VID_SEP}${versionId}`;
} }
function formatCacheKey(db, key) { function formatCacheKey(db: string, key: string): string {
// using double VID_SEP to make sure the cache key is unique // using double VID_SEP to make sure the cache key is unique
return `${db}${VID_SEP}${VID_SEP}${key}`; return `${db}${VID_SEP}${VID_SEP}${key}`;
} }
const VID_SEPPLUS = getPrefixUpperBoundary(VID_SEP); const VID_SEPPLUS = getPrefixUpperBoundary(VID_SEP);
interface RepairHints {
type: string;
value: string;
nextValue: string;
}
class VersioningRequestProcessor { class VersioningRequestProcessor {
writeCache: WriteCache;
wgm: WriteGatheringManager;
replicationGroupId: any// TODO
uidCounter: number;
queue: object;
repairing: object;
/** /**
* This class takes a random string generator as additional input. * This class takes a random string generator as additional input.
* @param {WriteCache} writeCache - the WriteCache to which this * @param {WriteCache} writeCache - the WriteCache to which this
@ -43,7 +61,11 @@ class VersioningRequestProcessor {
* @param {string} versioning.replicationGroupId - replication group id * @param {string} versioning.replicationGroupId - replication group id
* @constructor * @constructor
*/ */
constructor(writeCache, writeGatheringManager, versioning) { constructor(
writeCache: WriteCache,
writeGatheringManager: WriteGatheringManager,
versioning: object
) {
this.writeCache = writeCache; this.writeCache = writeCache;
this.wgm = writeGatheringManager; this.wgm = writeGatheringManager;
this.replicationGroupId = versioning.replicationGroupId; this.replicationGroupId = versioning.replicationGroupId;
@ -69,13 +91,13 @@ class VersioningRequestProcessor {
* @param {function} callback - callback function * @param {function} callback - callback function
* @return {any} - to finish the call * @return {any} - to finish the call
*/ */
get(request, logger, callback) { get(request: object, logger: object, callback: Function) {
const { db, key, options } = request; const { db, key, options } = request;
if (options && options.versionId) { if (options && options.versionId) {
const versionKey = formatVersionKey(key, options.versionId); const versionKey = formatVersionKey(key, options.versionId);
return this.wgm.get({ db, key: versionKey }, logger, callback); return this.wgm.get({ db, key: versionKey }, logger, callback);
} }
return this.wgm.get(request, logger, (err, data) => { return this.wgm.get(request, logger, (err: Error, data) => {
if (err) { if (err) {
return callback(err); return callback(err);
} }
@ -103,7 +125,7 @@ class VersioningRequestProcessor {
* @param {function} callback - callback function * @param {function} callback - callback function
* @return {any} - to finish the call * @return {any} - to finish the call
*/ */
getByListing(request, logger, callback) { getByListing(request: object, logger: object, callback: Function): any {
// enqueue the get entry; do nothing if another is processing it // enqueue the get entry; do nothing if another is processing it
// this is to manage the number of expensive listings when there // this is to manage the number of expensive listings when there
// are multiple concurrent gets on the same key which is a PHD version // are multiple concurrent gets on the same key which is a PHD version
@ -156,7 +178,7 @@ class VersioningRequestProcessor {
* @param {function} callback - callback function * @param {function} callback - callback function
* @return {boolean} - this request is the first in the queue or not * @return {boolean} - this request is the first in the queue or not
*/ */
enqueueGet(request, logger, callback) { enqueueGet(request: object, logger: object, callback: Function): boolean {
const cacheKey = formatCacheKey(request.db, request.key); const cacheKey = formatCacheKey(request.db, request.key);
// enqueue the get entry if another is processing it // enqueue the get entry if another is processing it
if (this.queue[cacheKey]) { if (this.queue[cacheKey]) {
@ -179,7 +201,7 @@ class VersioningRequestProcessor {
* @param {string} value - resulting value of the first request * @param {string} value - resulting value of the first request
* @return {undefined} * @return {undefined}
*/ */
dequeueGet(request, err, value) { dequeueGet(request: object, err: object, value: string): undefined {
const cacheKey = formatCacheKey(request.db, request.key); const cacheKey = formatCacheKey(request.db, request.key);
if (this.queue[cacheKey]) { if (this.queue[cacheKey]) {
this.queue[cacheKey].forEach(entry => { this.queue[cacheKey].forEach(entry => {
@ -208,7 +230,7 @@ class VersioningRequestProcessor {
(for 'put') (for 'put')
* @return {any} - to finish the call * @return {any} - to finish the call
*/ */
repairMaster(request, logger, hints) { repairMaster(request: object, logger: object, hints: RepairHints): any {
const { db, key } = request; const { db, key } = request;
logger.info('start repair process', { request }); logger.info('start repair process', { request });
this.writeCache.get({ db, key }, logger, (err, value) => { this.writeCache.get({ db, key }, logger, (err, value) => {
@ -248,7 +270,7 @@ class VersioningRequestProcessor {
* @param {function} callback - expect callback(err, data) * @param {function} callback - expect callback(err, data)
* @return {any} - to finish the call * @return {any} - to finish the call
*/ */
put(request, logger, callback) { put(request: object, logger: object, callback: Function): any {
const { db, key, value, options } = request; const { db, key, value, options } = request;
// valid combinations of versioning options: // valid combinations of versioning options:
// - !versioning && !versionId: normal non-versioning put // - !versioning && !versionId: normal non-versioning put
@ -318,7 +340,7 @@ class VersioningRequestProcessor {
* @param {function} callback - expect callback(err, batch, versionId) * @param {function} callback - expect callback(err, batch, versionId)
* @return {any} - to finish the call * @return {any} - to finish the call
*/ */
processVersionSpecificPut(request, logger, callback) { processVersionSpecificPut(request: object, logger: object, callback: Function): any {
const { db, key } = request; const { db, key } = request;
// versionId is empty: update the master version // versionId is empty: update the master version
if (request.options.versionId === '') { if (request.options.versionId === '') {
@ -347,7 +369,7 @@ class VersioningRequestProcessor {
} }
del(request, logger, callback) { del(request: object, logger: object, callback: Function) {
const { db, key, options } = request; const { db, key, options } = request;
// no versioning or versioning configuration off // no versioning or versioning configuration off
if (!(options && options.versionId)) { if (!(options && options.versionId)) {
@ -379,7 +401,9 @@ class VersioningRequestProcessor {
* @param {function} callback - expect callback(err, batch, versionId) * @param {function} callback - expect callback(err, batch, versionId)
* @return {any} - to finish the call * @return {any} - to finish the call
*/ */
processVersionSpecificDelete(request, logger, callback) { processVersionSpecificDelete(
request: object, logger: object, callback: Function
): any {
const { db, key, options } = request; const { db, key, options } = request;
// deleting a specific version // deleting a specific version
this.writeCache.get({ db, key }, logger, (err, data) => { this.writeCache.get({ db, key }, logger, (err, data) => {
@ -406,4 +430,4 @@ class VersioningRequestProcessor {
} }
} }
module.exports = VersioningRequestProcessor; export default VersioningRequestProcessor;

View File

@ -1,6 +1,7 @@
'use strict'; // eslint-disable-line 'use strict'; // eslint-disable-line
const errors = require('../errors'); import errors from '../errors';
import type WriteGatheringManager from './WriteGatheringManager'
function formatCacheKey(db, key) { function formatCacheKey(db, key) {
return `${db}\0\0${key}`; return `${db}\0\0${key}`;
@ -25,7 +26,13 @@ function formatCacheKey(db, key) {
* remains only until the write is done and no other update is using it. * remains only until the write is done and no other update is using it.
*/ */
class WriteCache { class WriteCache {
constructor(wgm) {
wgm: WriteGatheringManager;
cache: object;
queue: object;
counter: number;
constructor(wgm: WriteGatheringManager) {
this.wgm = wgm; this.wgm = wgm;
// internal state // internal state
this.cache = {}; this.cache = {};
@ -43,7 +50,7 @@ class WriteCache {
* @param {function} callback - callback function: callback(error, value) * @param {function} callback - callback function: callback(error, value)
* @return {any} - to finish the call * @return {any} - to finish the call
*/ */
get(request, logger, callback) { get(request: object, logger: object, callback: Function): any {
const { db, key } = request; const { db, key } = request;
const cacheKey = formatCacheKey(db, key); const cacheKey = formatCacheKey(db, key);
@ -60,7 +67,7 @@ class WriteCache {
return null; return null;
} }
// no other is in progress, get the key from the database // no other is in progress, get the key from the database
return this.wgm.get(request, logger, (err, value) => { return this.wgm.get(request, logger, (err: Error, value: any) => {
// answer all the queued requests // answer all the queued requests
this._dequeue(cacheKey, signature, err, value); this._dequeue(cacheKey, signature, err, value);
}); });
@ -75,7 +82,7 @@ class WriteCache {
* entry in the queue (which will do the get from the * entry in the queue (which will do the get from the
* database), undefined otherwise * database), undefined otherwise
*/ */
_enqueue(cacheKey, callback) { _enqueue(cacheKey: string, callback: Function) {
if (this.queue[cacheKey]) { if (this.queue[cacheKey]) {
this.queue[cacheKey].queue.push(callback); this.queue[cacheKey].queue.push(callback);
return undefined; return undefined;
@ -94,7 +101,9 @@ class WriteCache {
* @param {boolean} force - force dequeuing even on signature mismatch * @param {boolean} force - force dequeuing even on signature mismatch
* @return {undefined} - nothing * @return {undefined} - nothing
*/ */
_dequeue(cacheKey, signature, err, value, force = false) { _dequeue(
cacheKey: string, signature: number, err: object, value: string, force = false
): undefined {
if (this.queue[cacheKey] === undefined) { if (this.queue[cacheKey] === undefined) {
return; return;
} }
@ -139,7 +148,7 @@ class WriteCache {
* @param {function} callback - asynchronous callback of the call * @param {function} callback - asynchronous callback of the call
* @return {undefined} * @return {undefined}
*/ */
batch(request, logger, callback) { batch(request: object, logger: object, callback: Function): undefined {
const { db, array } = request; const { db, array } = request;
const signature = this._cacheWrite(db, array); const signature = this._cacheWrite(db, array);
this.wgm.batch(request, logger, (err, data) => { this.wgm.batch(request, logger, (err, data) => {
@ -159,7 +168,7 @@ class WriteCache {
* @param {object} array - batch operation to apply on the database * @param {object} array - batch operation to apply on the database
* @return {string} - signature of the request * @return {string} - signature of the request
*/ */
_cacheWrite(db, array) { _cacheWrite(db: string, array: object): string {
const signature = this.counter++; const signature = this.counter++;
array.forEach(entry => { array.forEach(entry => {
const cacheKey = formatCacheKey(db, entry.key); const cacheKey = formatCacheKey(db, entry.key);
@ -177,7 +186,7 @@ class WriteCache {
* @param {string} signature - signature if temporarily cached * @param {string} signature - signature if temporarily cached
* @return {undefined} * @return {undefined}
*/ */
_cacheClear(db, array, signature) { _cacheClear(db: string, array: object, signature: string): undefined {
array.forEach(entry => { array.forEach(entry => {
const key = formatCacheKey(db, entry.key); const key = formatCacheKey(db, entry.key);
if (this.cache[key] && this.cache[key].signature === signature) { if (this.cache[key] && this.cache[key].signature === signature) {
@ -190,4 +199,4 @@ class WriteCache {
} }
} }
module.exports = WriteCache; export default WriteCache;

View File

@ -6,7 +6,11 @@ const WG_TIMEOUT = 5; // batching period in milliseconds
* from operations targeting the same database. * from operations targeting the same database.
*/ */
class WriteGatheringManager { class WriteGatheringManager {
constructor(db) {
db: any; // TODO type
dbState: object; // TODO type
constructor(db: any) {
this.db = db; this.db = db;
this.dbState = {}; this.dbState = {};
} }
@ -21,11 +25,11 @@ class WriteGatheringManager {
* @param {function} callback - callback function: callback(error, value) * @param {function} callback - callback function: callback(error, value)
* @return {any} - to finish the call * @return {any} - to finish the call
*/ */
get(request, logger, callback) { get(request: object, logger: object, callback: Function): any {
return this.db.get(request, logger, callback); return this.db.get(request, logger, callback);
} }
list(request, logger, callback) { list(request: object, logger: object, callback: Function) {
return this.db.list(request, logger, callback); return this.db.list(request, logger, callback);
} }
@ -39,7 +43,7 @@ class WriteGatheringManager {
* @param {function} callback - callback(err) * @param {function} callback - callback(err)
* @return {WriteGatheringManager} - return this * @return {WriteGatheringManager} - return this
*/ */
batch(request, logger, callback) { batch(request: object, logger: object, callback: Function): WriteGatheringManager {
const { db, array } = request; const { db, array } = request;
if (this.dbState[db] === undefined) { if (this.dbState[db] === undefined) {
this.dbState[db] = { db, isCommitting: false }; this.dbState[db] = { db, isCommitting: false };
@ -55,7 +59,7 @@ class WriteGatheringManager {
}; };
} }
const bCache = dbState.batchCache; const bCache = dbState.batchCache;
array.forEach((entry, index) => { array.forEach((entry: any, index: number) => {
bCache.batch.push(entry); bCache.batch.push(entry);
bCache.uids.push(logger.getSerializedUids()); bCache.uids.push(logger.getSerializedUids());
bCache.callback.push(index ? null : callback); bCache.callback.push(index ? null : callback);
@ -69,7 +73,7 @@ class WriteGatheringManager {
* @param {string} db - Name of the database * @param {string} db - Name of the database
* @return {any} - to finish the call * @return {any} - to finish the call
*/ */
_commitBatch(db) { _commitBatch(db: string): any {
const dbState = this.dbState[db]; const dbState = this.dbState[db];
const bCache = dbState.batchCache; const bCache = dbState.batchCache;
// do nothing if no batch to replicate // do nothing if no batch to replicate
@ -124,7 +128,7 @@ class WriteGatheringManager {
* @param {object} batch - the committed batch * @param {object} batch - the committed batch
* @return {undefined} - nothing * @return {undefined} - nothing
*/ */
_batchCommitted(error, batch) { _batchCommitted(error: object, batch: object): undefined {
batch.callback.forEach(callback => { batch.callback.forEach(callback => {
if (callback) { if (callback) {
callback(error); callback(error);
@ -133,4 +137,4 @@ class WriteGatheringManager {
} }
} }
module.exports = WriteGatheringManager; export default WriteGatheringManager;

View File

@ -1,4 +1,4 @@
module.exports.VersioningConstants = { const VersioningConstants = {
VersionId: { VersionId: {
Separator: '\0', Separator: '\0',
}, },
@ -16,3 +16,5 @@ module.exports.VersioningConstants = {
v1: 'v1', v1: 'v1',
}, },
}; };
export { VersioningConstants };

View File

@ -59,9 +59,10 @@
"@types/jest": "^27.0.3", "@types/jest": "^27.0.3",
"@types/node": "^16.11.7", "@types/node": "^16.11.7",
"babel-plugin-add-module-exports": "^1.0.4", "babel-plugin-add-module-exports": "^1.0.4",
"eslint": "2.13.1", "eslint": "^7.32.0",
"eslint-config-airbnb": "6.2.0", "eslint-config-airbnb": "6.2.0",
"eslint-config-scality": "scality/Guidelines#ec33dfb", "eslint-config-scality": "scality/Guidelines#ec33dfb",
"eslint-plugin-jest": "^24.7.0",
"eslint-plugin-react": "^4.3.0", "eslint-plugin-react": "^4.3.0",
"jest": "^27.4.5", "jest": "^27.4.5",
"mocha": "8.0.1", "mocha": "8.0.1",
@ -76,14 +77,15 @@
"lint_md": "mdlint $(git ls-files '*.md')", "lint_md": "mdlint $(git ls-files '*.md')",
"lint_yml": "yamllint $(git ls-files '*.yml')", "lint_yml": "yamllint $(git ls-files '*.yml')",
"test": "jest tests/unit", "test": "jest tests/unit",
"ft_test": "find tests/functional -name \"*.js\" | grep -v \"utils/\" | xargs mocha --timeout 120000 --exit", "ft_test": "jest tests/functional --testTimeout=120000 --forceExit",
"coverage": "yarn coverage_unit && yarn coverage_ft && yarn coverage_report", "coverage": "yarn coverage_unit && yarn coverage_ft && yarn coverage_report",
"coverage_unit": "nyc --silent yarn test", "coverage_unit": "yarn test --coverage",
"coverage_ft": "nyc --silent --no-clean yarn ft_test", "coverage_ft": "yarn ft_test --coverage",
"coverage_report": "nyc report --all --reporter=text-summary --reporter=lcov", "coverage_report": "nyc report --all --reporter=text-summary --reporter=lcov",
"compile": "tsc" "compile": "tsc"
}, },
"jest": { "jest": {
"collectCoverage": true,
"maxWorkers": 1, "maxWorkers": 1,
"roots": [ "roots": [
".", ".",
@ -94,7 +96,9 @@
"/node_modules/", "/node_modules/",
"helpers?\\.js", "helpers?\\.js",
"Mock.*\\.js", "Mock.*\\.js",
"Dummy.*\\.js" "Dummy.*\\.js",
"tests/functional/\\.*/utils\\.js",
"tests/functional/\\.*/utils/.*\\.js"
], ],
"testMatch": [ "testMatch": [
"**/*.js" "**/*.js"

View File

@ -41,7 +41,7 @@ describe('KMIP Low Level Driver', () => {
return done(err); return done(err);
} }
const responsePayload = response.lookup( const responsePayload = response.lookup(
'Response Message/Batch Item/Response Payload' 'Response Message/Batch Item/Response Payload',
)[0]; )[0];
assert.deepStrictEqual(responsePayload, assert.deepStrictEqual(responsePayload,
requestPayload); requestPayload);

View File

@ -7,7 +7,7 @@ const { logger } = require('../../utils/kmip/ersatz.js');
describe('KMIP Connection Management', () => { describe('KMIP Connection Management', () => {
let server; let server;
before(done => { beforeAll(done => {
server = net.createServer(conn => { server = net.createServer(conn => {
// abort the connection as soon as it is accepted // abort the connection as soon as it is accepted
conn.destroy(); conn.destroy();
@ -15,7 +15,7 @@ describe('KMIP Connection Management', () => {
server.listen(5696); server.listen(5696);
server.on('listening', done); server.on('listening', done);
}); });
after(done => { afterAll(done => {
server.close(done); server.close(done);
}); });

View File

@ -28,7 +28,7 @@ const mongoserver = new MongoMemoryReplSet({
describe('MongoClientInterface', () => { describe('MongoClientInterface', () => {
let metadata; let metadata;
before(done => { beforeAll(done => {
mongoserver.waitUntilRunning().then(() => { mongoserver.waitUntilRunning().then(() => {
const opts = { const opts = {
mongodb: { mongodb: {
@ -44,7 +44,7 @@ describe('MongoClientInterface', () => {
}); });
}); });
after(done => { afterAll(done => {
async.series([ async.series([
next => metadata.close(next), next => metadata.close(next),
next => mongoserver.stop() next => mongoserver.stop()

View File

@ -152,8 +152,8 @@ function _deleteObjects(objects, cb) {
} }
describe('Basic Metadata Proxy Server test', describe('Basic Metadata Proxy Server test',
function bindToThis() { () => {
this.timeout(10000); jest.setTimeout(10000);
it('Shoud get the metadataInformation', done => { it('Shoud get the metadataInformation', done => {
dispatcher.get('/default/metadataInformation', dispatcher.get('/default/metadataInformation',
(err, response, body) => { (err, response, body) => {
@ -167,8 +167,8 @@ describe('Basic Metadata Proxy Server test',
}); });
}); });
describe('Basic Metadata Proxy Server CRUD test', function bindToThis() { describe('Basic Metadata Proxy Server CRUD test', () => {
this.timeout(10000); jest.setTimeout(10000);
beforeEach(done => { beforeEach(done => {
dispatcher.post(`/default/bucket/${Bucket}`, bucketInfo, dispatcher.post(`/default/bucket/${Bucket}`, bucketInfo,

View File

@ -28,7 +28,7 @@ describe('StatsClient class', () => {
afterEach(() => redisClient.clear(() => {})); afterEach(() => redisClient.clear(() => {}));
after(() => redisClient.disconnect()); afterAll(() => redisClient.disconnect());
it('should correctly record a new request by default one increment', it('should correctly record a new request by default one increment',
done => { done => {

View File

@ -85,7 +85,7 @@ describe('LRUCache', () => {
assert.strictEqual(lru.get(100), undefined); assert.strictEqual(lru.get(100), undefined);
}); });
it('max 1000000 entries', function lru1M() { it('max 1000000 entries', () => {
// this test takes ~1-2 seconds on a laptop, nevertheless set a // this test takes ~1-2 seconds on a laptop, nevertheless set a
// large timeout to reduce the potential of flakiness on possibly // large timeout to reduce the potential of flakiness on possibly
// slower CI environment. // slower CI environment.

View File

@ -85,7 +85,7 @@ const nonAlphabeticalData = [
const receivedData = data.map(item => ({ key: item.key, value: item.value })); const receivedData = data.map(item => ({ key: item.key, value: item.value }));
const receivedNonAlphaData = nonAlphabeticalData.map( const receivedNonAlphaData = nonAlphabeticalData.map(
item => ({ key: item.key, value: item.value }) item => ({ key: item.key, value: item.value }),
); );
const tests = [ const tests = [

View File

@ -189,7 +189,7 @@ describe('MergeStream', () => {
`${usePauseResume ? ' with pause/resume' : ''}` + `${usePauseResume ? ' with pause/resume' : ''}` +
`${errorAtEnd ? ' with error' : ''}`; `${errorAtEnd ? ' with error' : ''}`;
it(`${nbEntries} sequential entries${fixtureDesc}`, it(`${nbEntries} sequential entries${fixtureDesc}`,
function bigMergeSequential(done) { (done) => {
jest.setTimeout(10000); jest.setTimeout(10000);
const stream1 = []; const stream1 = [];
const stream2 = []; const stream2 = [];
@ -204,7 +204,7 @@ describe('MergeStream', () => {
stream1, stream2, usePauseResume, errorAtEnd, done); stream1, stream2, usePauseResume, errorAtEnd, done);
}); });
it(`${nbEntries} randomly mingled entries${fixtureDesc}`, it(`${nbEntries} randomly mingled entries${fixtureDesc}`,
function bigMergeRandom(done) { (done) => {
jest.setTimeout(10000); jest.setTimeout(10000);
const stream1 = []; const stream1 = [];
const stream2 = []; const stream2 = [];

View File

@ -257,7 +257,7 @@ describe('Auth Backend: Chain Backend', () => {
id3: 'email3@test.com', id3: 'email3@test.com',
// id4 should be overwritten // id4 should be overwritten
id4: 'email5@test.com', id4: 'email5@test.com',
} },
); );
}); });
}); });

View File

@ -1,467 +0,0 @@
'use strict';// eslint-disable-line strict
const assert = require('assert');
const async = require('async');
const leveldb = require('level');
const temp = require('temp');
temp.track();
const db = require('../../index').db;
const errors = require('../../lib/errors');
const IndexTransaction = db.IndexTransaction;
const key1 = 'key1';
const key2 = 'key2';
const key3 = 'key3';
const value1 = 'value1';
const value2 = 'value2';
const value3 = 'value3';
// Build a throwaway LevelDB instance rooted in a fresh temporary
// directory (auto-cleaned via temp.track()), JSON-encoding all values.
function createDb() {
    const tmpDir = temp.mkdirSync();
    return leveldb(tmpDir, { valueEncoding: 'json' });
}
// Fetch key `k` from `db` and confirm it holds exactly `v`.
// Calls `done()` on a match, `done(err)` on a lookup failure, and
// `done(new Error('values differ'))` when the stored value differs.
function checkValueInDb(db, k, v, done) {
    db.get(k, (err, value) => {
        if (err) {
            return done(err);
        }
        return value === v
            ? done()
            : done(new Error('values differ'));
    });
}
// Confirm that key `k` is absent from `db`: the lookup must fail with a
// notFound error. Any other outcome — a successful read or an unrelated
// failure — is reported to `done` as an error.
function checkValueNotInDb(db, k, done) {
    db.get(k, (err, value) => {
        const missing = Boolean(err && err.notFound);
        if (!missing) {
            return done(new Error(`value still in db: ${value}`));
        }
        return done();
    });
}
// Assert that `key` does not exist in `db`. Propagates unexpected lookup
// errors as-is, reports errors.PreconditionFailed when a value is found,
// and calls back with no argument when the key is genuinely missing.
function checkKeyNotExistsInDB(db, key, cb) {
    return db.get(key, (err, value) => {
        const unexpected = err && !err.notFound;
        if (unexpected) {
            return cb(err);
        }
        return value ? cb(errors.PreconditionFailed) : cb();
    });
}
// Thin wrapper around a throwaway LevelDB store that emulates
// conditional batch writes: every condition listed in
// `writeOptions.conditions` is verified before the batch is applied.
class ConditionalLevelDB {
    constructor() {
        this.db = createDb();
    }

    // Check all conditions (up to 10 concurrently); on first failure,
    // abort with that error, otherwise forward the batch to the store.
    batch(operations, writeOptions, cb) {
        return async.eachLimit(writeOptions.conditions, 10, (cond, next) => {
            if ('notExists' in cond) {
                checkKeyNotExistsInDB(this.db, cond.notExists, next);
            } else {
                next(new Error('unsupported conditional operation'));
            }
        }, err => {
            if (err) {
                return cb(err);
            }
            return this.db.batch(operations, writeOptions, cb);
        });
    }

    // Direct access to the wrapped LevelDB instance.
    get client() {
        return this.db;
    }
}
// End-to-end tests for IndexTransaction against a real (temporary)
// LevelDB store: basic put/del commits, operation validation, commit
// lifecycle rules, shortcut methods, and conditional (notExists)
// batch semantics via the ConditionalLevelDB wrapper above.
describe('IndexTransaction', () => {
    it('should allow put', done => {
        const db = createDb();
        const transaction = new IndexTransaction(db);
        transaction.push({
            type: 'put',
            key: 'k',
            value: 'v',
        });
        transaction.commit(err => {
            if (err) {
                return done(err);
            }
            return checkValueInDb(db, 'k', 'v', done);
        });
    });

    it('should allow del', done => {
        const db = createDb();
        const transaction = new IndexTransaction(db);
        transaction.push({
            type: 'del',
            key: 'k',
        });
        // seed the key first so the transaction has something to delete
        db.put('k', 'v', err => {
            if (err) {
                return done(err);
            }
            return transaction.commit(err => {
                if (err) {
                    return done(err);
                }
                return checkValueNotInDb(db, 'k', done);
            });
        });
    });

    it('should commit put and del combined', done => {
        const db = createDb();
        const transaction = new IndexTransaction(db);
        transaction.push({
            type: 'del',
            key: 'k1',
        });
        transaction.push({
            type: 'put',
            key: 'k2',
            value: 'v3',
        });
        // after commit: k1 must be gone, k2 must hold the new value v3
        function commitTransactionAndCheck(err) {
            if (err) {
                return done(err);
            }
            return transaction.commit(err => {
                if (err) {
                    return done(err);
                }
                return checkValueNotInDb(db, 'k1', err => {
                    if (err) {
                        return done(err);
                    }
                    return checkValueInDb(db, 'k2', 'v3', done);
                });
            });
        }
        // seed both keys, then run the transaction on top of them
        db.batch()
            .put('k1', 'v1')
            .put('k2', 'v2')
            .write(commitTransactionAndCheck);
    });

    it('should refuse types other than del and put', done => {
        const transaction = new IndexTransaction();
        function tryPush() {
            transaction.push({
                type: 'append',
                key: 'k',
                value: 'v',
            });
        }
        // push() is expected to throw synchronously with this flag set
        function validateError(err) {
            if (err && err.invalidTransactionVerb) {
                done();
                return true;
            }
            return done(new Error('should have denied verb append'));
        }
        assert.throws(tryPush, validateError);
    });

    it('should refuse put without key', done => {
        const transaction = new IndexTransaction();
        function tryPush() {
            transaction.push({
                type: 'put',
                value: 'v',
            });
        }
        function validateError(err) {
            if (err && err.missingKey) {
                done();
                return true;
            }
            return done(new Error('should have detected missing key'));
        }
        assert.throws(tryPush, validateError);
    });

    it('should refuse del without key', done => {
        const transaction = new IndexTransaction();
        function tryPush() {
            transaction.push({
                type: 'del',
            });
        }
        function validateError(err) {
            if (err && err.missingKey) {
                done();
                return true;
            }
            return done(new Error('should have detected missing key'));
        }
        assert.throws(tryPush, validateError);
    });

    it('should refuse put without value', done => {
        const transaction = new IndexTransaction();
        function tryPush() {
            transaction.push({
                type: 'put',
                key: 'k',
            });
        }
        function validateError(err) {
            if (err && err.missingValue) {
                done();
                return true;
            }
            return done(new Error('should have detected missing value'));
        }
        assert.throws(tryPush, validateError);
    });

    it('should refuse to commit without any ops', done => {
        const transaction = new IndexTransaction();
        transaction.commit(err => {
            if (err && err.emptyTransaction) {
                return done();
            }
            return done(new Error('allowed to commit an empty transaction'));
        });
    });

    it('should refuse to commit twice', done => {
        const transaction = new IndexTransaction(createDb());
        transaction.push({
            type: 'put',
            key: 'k',
            value: 'v',
        });
        // second commit must fail with the alreadyCommitted flag
        function tryCommitAgain(err) {
            if (err) {
                return done(err);
            }
            return transaction.commit(err2 => {
                if (err2 && err2.alreadyCommitted) {
                    return done();
                }
                return done(new Error('allowed to commit twice'));
            });
        }
        transaction.commit(tryCommitAgain);
    });

    it('should refuse add an op if already committed', done => {
        const transaction = new IndexTransaction(createDb());
        function push() {
            transaction.push({
                type: 'put',
                key: 'k',
                value: 'v',
            });
        }
        function validateError(err) {
            if (err && err.pushOnCommittedTransaction) {
                done();
                return true;
            }
            return done(new Error());
        }
        // pushing after a successful commit must throw synchronously
        function tryPushAgain(err) {
            if (err) {
                return done(err);
            }
            return assert.throws(push, validateError);
        }
        push();
        transaction.commit(tryPushAgain);
    });

    it('should have a working put shortcut method', done => {
        const db = createDb();
        const transaction = new IndexTransaction(db);
        transaction.put('k', 'v');
        transaction.commit(err => {
            if (err) {
                return done(err);
            }
            return checkValueInDb(db, 'k', 'v', done);
        });
    });

    it('should have a working del shortcut method', done => {
        const db = createDb();
        const transaction = new IndexTransaction(db);
        transaction.del('k');
        db.put('k', 'v', err => {
            if (err) {
                return done(err);
            }
            return transaction.commit(err => {
                if (err) {
                    return done(err);
                }
                return checkValueNotInDb(db, 'k', done);
            });
        });
    });

    it('should allow batch operation with notExists condition if key does not exist', done => {
        const db = new ConditionalLevelDB();
        const { client } = db;
        const transaction = new IndexTransaction(db);
        transaction.addCondition({ notExists: key1 });
        transaction.push({
            type: 'put',
            key: key1,
            value: value1,
        });
        // commit should succeed and the value should be readable afterwards
        return async.series([
            next => transaction.commit(next),
            next => client.get(key1, next),
        ], (err, res) => {
            assert.ifError(err);
            assert.strictEqual(res[1], value1);
            return done();
        });
    });

    it('should have a working addCondition shortcut method', done => {
        const db = new ConditionalLevelDB();
        const { client } = db;
        const transaction = new IndexTransaction(db);
        transaction.put(key1, value1);
        transaction.addCondition({ notExists: 'key1' });
        transaction.commit(err => {
            if (err) {
                return done(err);
            }
            return checkValueInDb(client, key1, value1, done);
        });
    });

    it('should not allow any op in a batch operation with notExists condition if key exists', done => {
        const db = new ConditionalLevelDB();
        const { client } = db;
        const transaction = new IndexTransaction(db);
        // key1 is pre-seeded below, so the notExists condition must fail
        // and none of the three puts may reach the store
        function tryPushAgain(err) {
            if (err) {
                return done(err);
            }
            transaction.addCondition({ notExists: key1 });
            transaction.push({
                type: 'put',
                key: key1,
                value: value1,
            });
            transaction.push({
                type: 'put',
                key: key2,
                value: value2,
            });
            transaction.push({
                type: 'put',
                key: key3,
                value: value3,
            });
            return transaction.commit(err => {
                if (!err || !err.PreconditionFailed) {
                    return done(new Error('should not be able to conditional put for duplicate key'));
                }
                return async.parallel([
                    next => checkKeyNotExistsInDB(client, key2, next),
                    next => checkKeyNotExistsInDB(client, key3, next),
                ], err => {
                    assert.ifError(err);
                    return done();
                });
            });
        }
        client.batch()
            .put(key1, value1)
            .write(tryPushAgain);
    });

    it('should not allow batch operation with empty condition', done => {
        const transaction = new IndexTransaction();
        try {
            transaction.addCondition({});
            done(new Error('should fail for empty condition'));
        } catch (err) {
            assert.strictEqual(err.missingCondition, true);
            done();
        }
    });

    it('should not allow batch operation with unsupported condition', done => {
        const transaction = new IndexTransaction();
        try {
            transaction.addCondition({ exists: key1 });
            done(new Error('should fail for unsupported condition, currently supported - notExists'));
        } catch (err) {
            assert.strictEqual(err.unsupportedConditionalOperation, true);
            done();
        }
    });
});

View File

@ -57,7 +57,6 @@ function zpad(key, length = 15) {
} }
class DummyRequestLogger { class DummyRequestLogger {
constructor() { constructor() {
this.ops = []; this.ops = [];
this.counts = { this.counts = {

View File

@ -69,7 +69,7 @@ describe('Check IP matches a list of CIDR ranges', () => {
[['192.168.1.1'], '192.168.1.1'], [['192.168.1.1'], '192.168.1.1'],
].forEach(item => ].forEach(item =>
it(`should match IP ${item[0][0]} without CIDR range`, it(`should match IP ${item[0][0]} without CIDR range`,
() => cidrListMatchCheck(item[0], item[1], true)) () => cidrListMatchCheck(item[0], item[1], true)),
); );
it('should not range match if CIDR range is not provided', it('should not range match if CIDR range is not provided',

View File

@ -631,7 +631,7 @@ Object.keys(acl).forEach(
dummyBucket.getUid(), testUid); dummyBucket.getUid(), testUid);
}); });
}); });
}) }),
); );
describe('uid default', () => { describe('uid default', () => {

View File

@ -47,7 +47,7 @@ describe('network.probe.Utils', () => {
{ {
errorType: 'MethodNotAllowed', errorType: 'MethodNotAllowed',
errorMessage: errors.MethodNotAllowed.description, errorMessage: errors.MethodNotAllowed.description,
} },
); );
done(); done();
}), }),
@ -64,7 +64,7 @@ describe('network.probe.Utils', () => {
{ {
errorType: 'MethodNotAllowed', errorType: 'MethodNotAllowed',
errorMessage: 'Very much not allowed', errorMessage: 'Very much not allowed',
} },
); );
done(); done();
}), }),

View File

@ -317,9 +317,9 @@ describe('patch location constriants', () => {
patchLocations( patchLocations(
{ [locationName]: locations }, { [locationName]: locations },
{ privateKey }, { privateKey },
mockLog mockLog,
), ),
{ [locationName]: expected } { [locationName]: expected },
); );
}); });
}); });
@ -330,9 +330,9 @@ describe('patch location constriants', () => {
patchLocations( patchLocations(
undefined, undefined,
{ privateKey }, { privateKey },
mockLog mockLog,
), ),
{} {},
); );
}); });
@ -345,9 +345,9 @@ describe('patch location constriants', () => {
}, },
}, },
{ privateKey }, { privateKey },
mockLog mockLog,
), ),
{} {},
); );
}); });
}); });

View File

@ -87,7 +87,7 @@ const operations = [
}, },
]; ];
describe('GcpService request behavior', function testSuite() { describe('GcpService request behavior', () => {
jest.setTimeout(120000); jest.setTimeout(120000);
let httpServer; let httpServer;
let client; let client;
@ -125,7 +125,7 @@ describe('GcpService request behavior', function testSuite() {
}); });
}); });
describe('GcpService pathStyle tests', function testSuite() { describe('GcpService pathStyle tests', () => {
jest.setTimeout(120000); jest.setTimeout(120000);
let httpServer; let httpServer;
let client; let client;
@ -159,7 +159,7 @@ describe('GcpService pathStyle tests', function testSuite() {
})); }));
}); });
describe('GcpService dnsStyle tests', function testSuite() { describe('GcpService dnsStyle tests', () => {
jest.setTimeout(120000); jest.setTimeout(120000);
let httpServer; let httpServer;
let client; let client;

View File

@ -82,7 +82,6 @@ const malformedLogEntry = new MockStream(malformedLogEntryData);
// mock a simple bucketclient to get a fake raft log // mock a simple bucketclient to get a fake raft log
class BucketClientMock { class BucketClientMock {
getRaftLog(raftId, start, limit, targetLeader, reqUids, callback) { getRaftLog(raftId, start, limit, targetLeader, reqUids, callback) {
switch (raftId) { switch (raftId) {
case 0: case 0:

View File

@ -395,7 +395,7 @@ describe('MongoClientInterface::_processEntryData', () => {
tests.forEach(([msg, isTransient, params, expected]) => it(msg, () => { tests.forEach(([msg, isTransient, params, expected]) => it(msg, () => {
assert.deepStrictEqual( assert.deepStrictEqual(
mongoTestClient._processEntryData(params, isTransient), mongoTestClient._processEntryData(params, isTransient),
expected expected,
); );
})); }));
}); });
@ -498,7 +498,7 @@ describe('MongoClientInterface::_isReplicationEntryStalled', () => {
tests.forEach(([msg, params, expected]) => it(msg, () => { tests.forEach(([msg, params, expected]) => it(msg, () => {
assert.deepStrictEqual( assert.deepStrictEqual(
mongoTestClient._isReplicationEntryStalled(params, testDate), mongoTestClient._isReplicationEntryStalled(params, testDate),
expected expected,
); );
})); }));
}); });

View File

@ -32,7 +32,7 @@ describe('StringHash', () => {
done(); done();
}); });
it(`Should distribute uniformly with a maximum of ${ERROR}% of deviation`, it(`Should distribute uniformly with a maximum of ${ERROR}% of deviation`,
function f(done) { (done) => {
jest.setTimeout(20000); jest.setTimeout(20000);
const strings = new Array(STRING_COUNT).fill('') const strings = new Array(STRING_COUNT).fill('')
.map(() => randomString(10)); .map(() => randomString(10));

View File

@ -1,4 +1,4 @@
const VID = require('../../../lib/versioning/VersionID.js'); const VID = require('../../../lib/versioning/VersionID');
const assert = require('assert'); const assert = require('assert');
function randkey(length) { function randkey(length) {

View File

@ -177,7 +177,7 @@ class LoopbackServerChannel extends EchoChannel {
serverExtensions.map(extension => serverExtensions.map(extension =>
this.KMIP.TextString( this.KMIP.TextString(
extension.name, extension.name,
extension.value) extension.value),
))); )));
} }
if (queryFunctions.includes('Query Extension Map')) { if (queryFunctions.includes('Query Extension Map')) {

View File

@ -50,7 +50,6 @@ class EchoChannel extends EventEmitter {
this.clogged = true; this.clogged = true;
return this; return this;
} }
} }
class MirrorChannel extends EchoChannel { class MirrorChannel extends EchoChannel {

1103
yarn.lock

File diff suppressed because it is too large Load Diff