Compare commits


10 Commits

Author SHA1 Message Date
Jordi Bertran de Balanda ac852727ba fix ft_test 2022-01-13 17:30:42 +01:00
Jordi Bertran de Balanda 151fde6a35 fix lint after eslint upgrade 2022-01-13 15:14:13 +01:00
Jordi Bertran de Balanda 2dc3ac6bb6 upgrade eslint for jest plugin 2022-01-13 15:14:13 +01:00
Jordi Bertran de Balanda 476da8ed62 more migrations 2022-01-12 18:35:33 +01:00
Jordi Bertran de Balanda 27e2e2393c migrate Extension 2022-01-12 18:35:33 +01:00
Jordi Bertran de Balanda ab06f6f7fb more migration 2022-01-12 18:35:33 +01:00
Jordi Bertran de Balanda 62d66e89ac migrate LRUCache 2022-01-12 18:35:33 +01:00
Jordi Bertran de Balanda 1fc0efeb78 deprecate leveldb support 2022-01-12 18:35:33 +01:00
Jordi Bertran de Balanda 8c2e95e31b initial batch of migrations 2022-01-12 18:35:33 +01:00
Jordi Bertran de Balanda e8bd68619d make index imports coherent 2022-01-12 18:35:33 +01:00
196 changed files with 4582 additions and 5011 deletions

View File

@ -1 +0,0 @@
{ "extends": "scality" }

.eslintrc.json (new file, 7 lines)
View File

@ -0,0 +1,7 @@
{
"extends": ["scality"],
"plugins": ["jest"],
"env": {
"jest/globals": true
}
}

View File

@ -1,8 +1,8 @@
module.exports = {
presets: [
['@babel/preset-env', {targets: {node: 'current'}}],
['@babel/preset-env', { targets: { node: 'current' } }],
'@babel/preset-typescript',
],
plugins: ['add-module-exports']
plugins: ['add-module-exports'],
};

View File

@ -1,16 +1,15 @@
module.exports = {
export default {
auth: require('./lib/auth/auth'),
constants: require('./lib/constants'),
db: require('./lib/db'),
errors: require('./lib/errors.js'),
errors: require('./lib/errors'),
errorUtils: require('./lib/errorUtils'),
shuffle: require('./lib/shuffle'),
stringHash: require('./lib/stringHash'),
ipCheck: require('./lib/ipCheck'),
jsutil: require('./lib/jsutil'),
https: {
ciphers: require('./lib/https/ciphers.js'),
dhparam: require('./lib/https/dh2048.js'),
ciphers: require('./lib/https/ciphers'),
dhparam: require('./lib/https/dh2048'),
},
algorithms: {
list: require('./lib/algos/list/exportAlgos'),
@ -25,23 +24,23 @@ module.exports = {
},
},
policies: {
evaluators: require('./lib/policyEvaluator/evaluator.js'),
evaluators: require('./lib/policyEvaluator/evaluator'),
validateUserPolicy: require('./lib/policy/policyValidator')
.validateUserPolicy,
evaluatePrincipal: require('./lib/policyEvaluator/principal'),
RequestContext: require('./lib/policyEvaluator/RequestContext.js'),
RequestContext: require('./lib/policyEvaluator/RequestContext'),
requestUtils: require('./lib/policyEvaluator/requestUtils'),
actionMaps: require('./lib/policyEvaluator/utils/actionMaps'),
},
Clustering: require('./lib/Clustering'),
testing: {
matrix: require('./lib/testing/matrix.js'),
matrix: require('./lib/testing/matrix'),
},
versioning: {
VersioningConstants: require('./lib/versioning/constants.js')
VersioningConstants: require('./lib/versioning/constants')
.VersioningConstants,
Version: require('./lib/versioning/Version.js').Version,
VersionID: require('./lib/versioning/VersionID.js'),
Version: require('./lib/versioning/Version').Version,
VersionID: require('./lib/versioning/VersionID'),
},
network: {
http: {
@ -57,8 +56,8 @@ module.exports = {
probe: {
ProbeServer: require('./lib/network/probe/ProbeServer'),
HealthProbeServer:
require('./lib/network/probe/HealthProbeServer.js'),
Utils: require('./lib/network/probe/Utils.js'),
require('./lib/network/probe/HealthProbeServer'),
Utils: require('./lib/network/probe/Utils'),
},
kmip: require('./lib/network/kmip'),
kmipClient: require('./lib/network/kmip/Client'),

View File

@ -1,6 +1,7 @@
'use strict'; // eslint-disable-line
const cluster = require('cluster');
import * as cluster from 'cluster';
class Clustering {
/**
@ -12,20 +13,31 @@ class Clustering {
* releasing resources
* @return {Clustering} itself
*/
constructor(size, logger, shutdownTimeout) {
this._size = size;
size: number;
shutdownTimeout: number;
logger: any; // TODO logger ???
shutdown: boolean;
workers: cluster.Worker[];
workersTimeout: NodeJS.Timeout[]; // TODO array of worker timeouts
workersStatus: number[];
status: number;
exitCb?: Function;
index?: number;
constructor(size: number, logger: any, shutdownTimeout = 5000) {
if (size < 1) {
throw new Error('Cluster size must be greater than or equal to 1');
}
this._shutdownTimeout = shutdownTimeout || 5000;
this._logger = logger;
this._shutdown = false;
this._workers = new Array(size).fill(undefined);
this._workersTimeout = new Array(size).fill(undefined);
this._workersStatus = new Array(size).fill(undefined);
this._status = 0;
this._exitCb = undefined; // Exit callback
this._index = undefined;
this.size = size;
this.shutdownTimeout = shutdownTimeout || 5000;
this.logger = logger;
this.shutdown = false;
this.workers = new Array(size).fill(undefined);
this.workersTimeout = new Array(size).fill(undefined);
this.workersStatus = new Array(size).fill(undefined);
this.status = 0;
this.exitCb = undefined; // Exit callback
this.index = undefined;
}
/**
@ -34,23 +46,23 @@ class Clustering {
* @private
* @return {undefined}
*/
_afterStop() {
_afterStop(): undefined {
// Assuming all workers shut down gracefully
this._status = 0;
const size = this._size;
this.status = 0;
const size = this.size;
for (let i = 0; i < size; ++i) {
// If the process returned an error code or was killed by a signal,
// set the status
if (typeof this._workersStatus[i] === 'number') {
this._status = this._workersStatus[i];
if (typeof this.workersStatus[i] === 'number') {
this.status = this.workersStatus[i];
break;
} else if (typeof this._workersStatus[i] === 'string') {
this._status = 1;
} else if (typeof this.workersStatus[i] === 'string') {
this.status = 1;
break;
}
}
if (this._exitCb) {
return this._exitCb(this);
if (this.exitCb) {
return this.exitCb(this);
}
return process.exit(this.getStatus());
}
@ -64,45 +76,47 @@ class Clustering {
* @param {string} signal - Exit signal
* @return {undefined}
*/
_workerExited(worker, i, code, signal) {
_workerExited(
worker: cluster.Worker, index: number, code: number, signal: number
): undefined {
// If the worker:
// - was killed by a signal
// - returned an error code
// - or just stopped
if (signal) {
this._logger.info('Worker killed by signal', {
this.logger.info('Worker killed by signal', {
signal,
id: i,
id: index,
childPid: worker.process.pid,
});
this._workersStatus[i] = signal;
this.workersStatus[index] = signal;
} else if (code !== 0) {
this._logger.error('Worker exit with code', {
this.logger.error('Worker exit with code', {
code,
id: i,
id: index,
childPid: worker.process.pid,
});
this._workersStatus[i] = code;
this.workersStatus[index] = code;
} else {
this._logger.info('Worker shutdown gracefully', {
id: i,
this.logger.info('Worker shutdown gracefully', {
id: index,
childPid: worker.process.pid,
});
this._workersStatus[i] = undefined;
this.workersStatus[index] = undefined;
}
this._workers[i] = undefined;
if (this._workersTimeout[i]) {
clearTimeout(this._workersTimeout[i]);
this._workersTimeout[i] = undefined;
this.workers[index] = undefined;
if (this.workersTimeout[index]) {
clearTimeout(this.workersTimeout[index]);
this.workersTimeout[index] = undefined;
}
// If we don't trigger the stop method, the watchdog
// will autorestart the worker
if (this._shutdown === false) {
return process.nextTick(() => this.startWorker(i));
if (this.shutdown === false) {
return process.nextTick(() => this.startWorker(index));
}
// Check if any worker is still running
if (!this._workers.every(cur => cur === undefined)) {
return undefined;
if (!this.workers.every(cur => cur === undefined)) {
return;
}
return this._afterStop();
}
@ -113,26 +127,26 @@ class Clustering {
* @param {number} i Index of the starting worker
* @return {undefined}
*/
startWorker(i) {
startWorker(index: number): undefined {
if (!cluster.isMaster) {
return;
}
// Fork a new worker
this._workers[i] = cluster.fork();
this.workers[index] = cluster.fork();
// Listen for message from the worker
this._workers[i].on('message', msg => {
this.workers[index].on('message', msg => {
// If the worker is ready, send it its id
if (msg === 'ready') {
this._workers[i].send({ msg: 'setup', id: i });
this.workers[index].send({ msg: 'setup', id: index });
}
});
this._workers[i].on('exit', (code, signal) =>
this._workerExited(this._workers[i], i, code, signal));
this.workers[index].on('exit', (code, signal) =>
this._workerExited(this.workers[index], index, code, signal));
// Trigger when the worker was started
this._workers[i].on('online', () => {
this._logger.info('Worker started', {
id: i,
childPid: this._workers[i].process.pid,
this.workers[index].on('online', () => {
this.logger.info('Worker started', {
id: index,
childPid: this.workers[index].process.pid,
});
});
}
@ -143,8 +157,8 @@ class Clustering {
* @param {function} cb - Callback(Clustering, [exitSignal])
* @return {Clustering} Itself
*/
onExit(cb) {
this._exitCb = cb;
onExit(cb: Function): Clustering {
this.exitCb = cb;
return this;
}
@ -155,21 +169,21 @@ class Clustering {
* @param {function} cb - Callback to run the worker
* @return {Clustering} itself
*/
start(cb) {
start(cb: Function): Clustering {
process.on('SIGINT', () => this.stop('SIGINT'));
process.on('SIGHUP', () => this.stop('SIGHUP'));
process.on('SIGQUIT', () => this.stop('SIGQUIT'));
process.on('SIGTERM', () => this.stop('SIGTERM'));
process.on('SIGPIPE', () => {});
process.on('exit', (code, signal) => {
if (this._exitCb) {
this._status = code || 0;
return this._exitCb(this, signal);
if (this.exitCb) {
this.status = code || 0;
return this.exitCb(this, signal);
}
return process.exit(code || 0);
});
process.on('uncaughtException', err => {
this._logger.fatal('caught error', {
this.logger.fatal('caught error', {
error: err.message,
stack: err.stack.split('\n').map(str => str.trim()),
});
@ -180,7 +194,7 @@ class Clustering {
// know the id of the slave cluster
process.on('message', msg => {
if (msg.msg === 'setup') {
this._index = msg.id;
this.index = msg.id;
cb(this);
}
});
@ -188,7 +202,7 @@ class Clustering {
// the worker has started
process.send('ready');
} else {
for (let i = 0; i < this._size; ++i) {
for (let i = 0; i < this.size; ++i) {
this.startWorker(i);
}
}
@ -200,8 +214,8 @@ class Clustering {
*
* @return {Cluster.Worker[]} Workers
*/
getWorkers() {
return this._workers;
getWorkers(): cluster.Worker[] {
return this.workers;
}
/**
@ -209,8 +223,8 @@ class Clustering {
*
* @return {number} Status code
*/
getStatus() {
return this._status;
getStatus(): number {
return this.status;
}
/**
@ -218,8 +232,8 @@ class Clustering {
*
* @return {boolean} - True if master, false otherwise
*/
isMaster() {
return this._index === undefined;
isMaster(): boolean {
return this.index === undefined;
}
/**
@ -227,8 +241,8 @@ class Clustering {
*
* @return {number|undefined} Worker index, undefined if it's master
*/
getIndex() {
return this._index;
getIndex(): number {
return this.index;
}
/**
@ -237,22 +251,22 @@ class Clustering {
* @param {string} signal - Set internally when processes killed by signal
* @return {undefined}
*/
stop(signal) {
stop(signal: string): undefined {
if (!cluster.isMaster) {
if (this._exitCb) {
return this._exitCb(this, signal);
if (this.exitCb) {
return this.exitCb(this, signal);
}
return process.exit(0);
}
this._shutdown = true;
return this._workers.forEach((worker, i) => {
this.shutdown = true;
return this.workers.forEach((worker, index) => {
if (!worker) {
return undefined;
}
this._workersTimeout[i] = setTimeout(() => {
this.workersTimeout[index] = setTimeout(() => {
// Kill the worker if SIGTERM was ignored or takes too long
process.kill(worker.process.pid, 'SIGKILL');
}, this._shutdownTimeout);
}, this.shutdownTimeout);
// Send SIGTERM to the process, allowing it to release resources
// and save some state
return process.kill(worker.process.pid, 'SIGTERM');
@ -260,4 +274,4 @@ class Clustering {
}
}
module.exports = Clustering;
export default Clustering;
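
A minimal usage sketch of the migrated class; the logger object below is a stand-in, since the class still types it as `any` (per its own TODO):

```ts
import Clustering from './Clustering';

// Hypothetical logger satisfying the info/error/fatal calls the class makes.
const logger = { info: console.log, error: console.error, fatal: console.error };

new Clustering(4, logger) // four workers, default 5s shutdown timeout
    .onExit((c: Clustering) => process.exit(c.getStatus()))
    .start((c: Clustering) => {
        // Invoked in each worker once the master has sent its 'setup' id.
        logger.info('worker online', { index: c.getIndex() });
    });
```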

View File

@ -1,4 +1,4 @@
const assert = require('assert');
import { strict as assert } from 'assert';
/**
* @class
@ -12,9 +12,15 @@ class LRUCache {
* @param {number} maxEntries - maximum number of entries kept in
* the cache
*/
constructor(maxEntries) {
maxEntries: number;
private entryCount: number;
private entryMap: object;
private lruHead: any; // TODO lruTrail?
private lruTail: any; // TODO lruTrail?
constructor(maxEntries: number) {
assert(maxEntries >= 1);
this._maxEntries = maxEntries;
this.maxEntries = maxEntries;
this.clear();
}
@ -27,8 +33,8 @@ class LRUCache {
* @return {boolean} true if the cache contained an entry with
* this key, false if it did not
*/
add(key, value) {
let entry = this._entryMap[key];
add(key: string, value: object): boolean {
let entry = this.entryMap[key];
if (entry) {
entry.value = value;
// make the entry the most recently used by re-pushing it
@ -37,15 +43,15 @@ class LRUCache {
this._lruPushEntry(entry);
return true;
}
if (this._entryCount === this._maxEntries) {
if (this.entryCount === this.maxEntries) {
// if the cache is already full, abide by the LRU strategy
// and remove the least recently used entry from the cache
// before pushing the new entry
this._removeEntry(this._lruTail);
this._removeEntry(this.lruTail);
}
entry = { key, value };
this._entryMap[key] = entry;
this._entryCount += 1;
this.entryMap[key] = entry;
this.entryCount += 1;
this._lruPushEntry(entry);
return false;
}
@ -59,8 +65,8 @@ class LRUCache {
* exists in the cache, or undefined if not found - either if the
* key was never added or if it has been evicted from the cache.
*/
get(key) {
const entry = this._entryMap[key];
get(key: string): object | undefined {
const entry = this.entryMap[key];
if (entry) {
// make the entry the most recently used by re-pushing it
// to the head of the LRU list
@ -79,8 +85,8 @@ class LRUCache {
* there was no entry with this key in the cache - either if the
* key was never added or if it has been evicted from the cache.
*/
remove(key) {
const entry = this._entryMap[key];
remove(key: string): boolean {
const entry = this.entryMap[key];
if (entry) {
this._removeEntry(entry);
return true;
@ -93,8 +99,8 @@ class LRUCache {
*
* @return {number} current number of cached entries
*/
count() {
return this._entryCount;
count(): number {
return this.entryCount;
}
/**
@ -102,11 +108,11 @@ class LRUCache {
*
* @return {undefined}
*/
clear() {
this._entryMap = {};
this._entryCount = 0;
this._lruHead = null;
this._lruTail = null;
clear(): undefined {
this.entryMap = {};
this.entryCount = 0;
this.lruHead = null;
this.lruTail = null;
}
/**
@ -116,16 +122,16 @@ class LRUCache {
* @param {object} entry - entry to push
* @return {undefined}
*/
_lruPushEntry(entry) {
_lruPushEntry(entry: object): undefined {
/* eslint-disable no-param-reassign */
entry._lruNext = this._lruHead;
entry._lruNext = this.lruHead;
entry._lruPrev = null;
if (this._lruHead) {
this._lruHead._lruPrev = entry;
if (this.lruHead) {
this.lruHead._lruPrev = entry;
}
this._lruHead = entry;
if (!this._lruTail) {
this._lruTail = entry;
this.lruHead = entry;
if (!this.lruTail) {
this.lruTail = entry;
}
/* eslint-enable no-param-reassign */
}
@ -136,17 +142,17 @@ class LRUCache {
* @param {object} entry - entry to remove
* @return {undefined}
*/
_lruRemoveEntry(entry) {
_lruRemoveEntry(entry): undefined {
/* eslint-disable no-param-reassign */
if (entry._lruPrev) {
entry._lruPrev._lruNext = entry._lruNext;
} else {
this._lruHead = entry._lruNext;
this.lruHead = entry._lruNext;
}
if (entry._lruNext) {
entry._lruNext._lruPrev = entry._lruPrev;
} else {
this._lruTail = entry._lruPrev;
this.lruTail = entry._lruPrev;
}
/* eslint-enable no-param-reassign */
}
@ -157,11 +163,11 @@ class LRUCache {
* @param {object} entry - cache entry to remove
* @return {undefined}
*/
_removeEntry(entry) {
_removeEntry(entry: object): undefined {
this._lruRemoveEntry(entry);
delete this._entryMap[entry.key];
this._entryCount -= 1;
delete this.entryMap[entry.key];
this.entryCount -= 1;
}
}
module.exports = LRUCache;
export default LRUCache;
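
The LRU behaviour implied by the comments above, as a short sketch:

```ts
import LRUCache from './LRUCache';

const cache = new LRUCache(2);
cache.add('a', { v: 1 });  // false: 'a' was not cached yet
cache.add('b', { v: 2 });
cache.get('a');            // { v: 1 }; 'a' is now the most recently used
cache.add('c', { v: 3 });  // cache is full: evicts 'b', the least recently used
cache.get('b');            // undefined: either never added or evicted
cache.count();             // 2
```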

View File

@ -1,6 +1,6 @@
'use strict'; // eslint-disable-line strict
const { FILTER_SKIP, SKIP_NONE } = require('./tools');
import { FILTER_SKIP, SKIP_NONE } from './tools';
// Use a heuristic to amortize the cost of JSON
// serialization/deserialization only on largest metadata where the
@ -22,7 +22,7 @@ const TRIM_METADATA_MIN_BLOB_SIZE = 10000;
/**
* Base class of listing extensions.
*/
class Extension {
export class Extension {
/**
* This takes a list of parameters and a logger as the inputs.
* Derivatives should have their own format regarding parameters.
@ -31,7 +31,13 @@ class Extension {
* @param {RequestLogger} logger - the logger
* @constructor
*/
constructor(parameters, logger) {
parameters: any;
logger: any;
res: any;
keys: number;
constructor(parameters: any, logger: any) {
// inputs
this.parameters = parameters;
this.logger = logger;
@ -51,7 +57,7 @@ class Extension {
* heavy unused fields, or left untouched (depending on size
* heuristics)
*/
trimMetadata(value) {
trimMetadata(value: string): string {
let ret = undefined;
if (value.length >= TRIM_METADATA_MIN_BLOB_SIZE) {
try {
@ -81,7 +87,7 @@ class Extension {
*
* @return {object} - listing parameters for metadata
*/
genMDParams() {
genMDParams(): object {
return {};
}
@ -96,7 +102,7 @@ class Extension {
* = 0: entry is accepted but not included (skipping)
* < 0: entry is not accepted, listing should finish
*/
filter(entry) {
filter(entry: any): number {
return entry ? FILTER_SKIP : FILTER_SKIP;
}
@ -108,7 +114,7 @@ class Extension {
* @return {string} - the insight: a common prefix or a master key,
* or SKIP_NONE if there is no insight
*/
skipping() {
skipping(): string {
return SKIP_NONE;
}
@ -116,9 +122,7 @@ class Extension {
* Get the listing results. Format depends on derivatives' specific logic.
* @return {Array} - The listed elements
*/
result() {
result(): any {
return this.res;
}
}
module.exports.default = Extension;
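
To illustrate the contract spelled out in the base-class comments (filter return codes, genMDParams, result), here is a sketch of a minimal derived extension; the FirstN class and its `n` parameter are invented for the example:

```ts
import { Extension } from './Extension';
import { FILTER_ACCEPT, FILTER_END } from './tools';

// Hypothetical extension that accepts only the first `parameters.n` entries.
class FirstN extends Extension {
    constructor(parameters: any, logger: any) {
        super(parameters, logger);
        this.res = [];
    }
    filter(entry: any): number {
        if (this.keys >= this.parameters.n) {
            return FILTER_END; // < 0: entry not accepted, listing should finish
        }
        this.keys += 1;
        this.res.push(entry);
        return FILTER_ACCEPT; // > 0: entry accepted and included
    }
}
```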

View File

@ -1,16 +1,37 @@
'use strict'; // eslint-disable-line strict
const { inc, checkLimit, listingParamsMasterKeysV0ToV1,
FILTER_END, FILTER_ACCEPT } = require('./tools');
import { inc, checkLimit, listingParamsMasterKeysV0ToV1,
FILTER_END, FILTER_ACCEPT } from './tools';
import { VersioningConstants as VSConst } from '../../versioning/constants';
const DEFAULT_MAX_KEYS = 1000;
const VSConst = require('../../versioning/constants').VersioningConstants;
const { DbPrefixes, BucketVersioningKeyFormat } = VSConst;
function numberDefault(num, defaultNum) {
function numberDefault(num: string, defaultNum: number): number {
const parsedNum = Number.parseInt(num, 10);
return Number.isNaN(parsedNum) ? defaultNum : parsedNum;
}
interface MPUParams {
delimiter: any;
splitter: any;
prefix: any; // TODO type
uploadIdMarker: any; // TODO type
maxKeys: string;
queryPrefixLength: string;
keyMarker?: any; // TODO type
}
interface V0Params {
gt?: string;
gte?: string;
lt?: string;
lte?: string;
}
/**
* Class for the MultipartUploads extension
*/
@ -23,7 +44,22 @@ class MultipartUploads {
* @param {String} [vFormat] - versioning key format
* @return {undefined}
*/
constructor(params, logger, vFormat) {
params: MPUParams; // TODO param type
vFormat: string; // TODO vFormat type
CommonPrefixes: any[]; // TODO commonPrefixes type
Uploads: any[]; // TODO type
IsTruncated: boolean;
NextKeyMarker: string;
NextUploadIdMarker: string;
prefixLength: number;
queryPrefixLength: number;
keys: number;
maxKeys: number;
delimiter: any; // TODO type
splitter: any; // TODO type
logger: any; // TODO type
constructor(params: MPUParams, logger: any, vFormat: string) {
this.params = params;
this.vFormat = vFormat || BucketVersioningKeyFormat.v0;
this.CommonPrefixes = [];
@ -51,8 +87,8 @@ class MultipartUploads {
}[this.vFormat]);
}
genMDParamsV0() {
const params = {};
genMDParamsV0(): V0Params {
const params: V0Params = {};
if (this.params.keyMarker) {
params.gt = `overview${this.params.splitter}` +
`${this.params.keyMarker}${this.params.splitter}`;
@ -74,6 +110,7 @@ class MultipartUploads {
}
genMDParamsV1() {
// TODO v1 params definition
const v0params = this.genMDParamsV0();
return listingParamsMasterKeysV0ToV1(v0params);
}
@ -85,7 +122,7 @@ class MultipartUploads {
* @param {String} value - The value of the key
* @return {undefined}
*/
addUpload(value) {
addUpload(value: string): undefined {
const tmp = JSON.parse(value);
this.Uploads.push({
key: tmp.key,
@ -114,7 +151,7 @@ class MultipartUploads {
* @param {String} commonPrefix - The commonPrefix to add
* @return {undefined}
*/
addCommonPrefix(commonPrefix) {
addCommonPrefix(commonPrefix: string): undefined {
if (this.CommonPrefixes.indexOf(commonPrefix) === -1) {
this.CommonPrefixes.push(commonPrefix);
this.NextKeyMarker = commonPrefix;
@ -122,11 +159,11 @@ class MultipartUploads {
}
}
getObjectKeyV0(obj) {
getObjectKeyV0(obj: any) { // TODO this is an Upload value
return obj.key;
}
getObjectKeyV1(obj) {
getObjectKeyV1(obj: any) { // TODO this is an Upload value
return obj.key.slice(DbPrefixes.Master.length);
}
@ -135,14 +172,14 @@ class MultipartUploads {
* @param {String} obj - The key and value of the element
* @return {number} - > 0: Continue, < 0: Stop
*/
filter(obj) {
filter(obj: any): number {
// Check first in case of maxkeys = 0
if (this.keys >= this.maxKeys) {
// In cases of maxKeys <= 0 => IsTruncated = false
this.IsTruncated = this.maxKeys > 0;
return FILTER_END;
}
const key = this.getObjectKey(obj);
const key = this.getObjectKey(obj); // TODO this is actually valid - see ctor
const value = obj.value;
if (this.delimiter) {
const mpuPrefixSlice = `overview${this.splitter}`.length;
@ -162,7 +199,7 @@ class MultipartUploads {
return FILTER_ACCEPT;
}
skipping() {
skipping(): string {
return '';
}
@ -170,7 +207,7 @@ class MultipartUploads {
* Returns the formatted result
* @return {Object} - The result.
*/
result() {
result(): object {
return {
CommonPrefixes: this.CommonPrefixes,
Uploads: this.Uploads,
@ -183,6 +220,7 @@ class MultipartUploads {
}
}
module.exports = {
export {
MultipartUploads,
};
MPUParams
}

View File

@ -1,14 +1,27 @@
'use strict'; // eslint-disable-line strict
const Extension = require('./Extension').default;
import { Extension } from './Extension';
const { checkLimit, FILTER_END, FILTER_ACCEPT, FILTER_SKIP } = require('./tools');
import { checkLimit, FILTER_END, FILTER_ACCEPT, FILTER_SKIP } from './tools';
const DEFAULT_MAX_KEYS = 10000;
interface ListParams {
maxKeys: number;
filterKey: any; // TODO type
filterKeyStartsWith: any; // TODO type
}
/**
* Class of an extension doing the simple listing
*/
class List extends Extension {
maxKeys: number;
filterKey: any;
filterKeyStartsWith: any;
/**
* Constructor
* Set the logger and the res
@ -16,7 +29,7 @@ class List extends Extension {
* @param {RequestLogger} logger - The logger of the request
* @return {undefined}
*/
constructor(parameters, logger) {
constructor(parameters: ListParams, logger: any) {
super(parameters, logger);
this.res = [];
if (parameters) {
@ -29,7 +42,7 @@ class List extends Extension {
this.keys = 0;
}
genMDParams() {
genMDParams(): object {
const params = this.parameters ? {
gt: this.parameters.gt,
gte: this.parameters.gte || this.parameters.start,
@ -53,7 +66,7 @@ class List extends Extension {
*
* @return {Boolean} Returns true if matches, else false.
*/
customFilter(value) {
customFilter(value: string): boolean {
let _value;
try {
_value = JSON.parse(value);
@ -90,7 +103,7 @@ class List extends Extension {
* @return {number} - > 0 : continue listing
* < 0 : listing done
*/
filter(elem) {
filter(elem: object): number {
// Check first in case of maxkeys <= 0
if (this.keys >= this.maxKeys) {
return FILTER_END;
@ -117,7 +130,7 @@ class List extends Extension {
* Function returning the result
* @return {Array} - The listed elements
*/
result() {
result(): any[] {
return this.res;
}
}

View File

@ -1,9 +1,9 @@
'use strict'; // eslint-disable-line strict
const Extension = require('./Extension').default;
const { inc, listingParamsMasterKeysV0ToV1,
FILTER_END, FILTER_ACCEPT, FILTER_SKIP } = require('./tools');
const VSConst = require('../../versioning/constants').VersioningConstants;
import { Extension } from './Extension';
import { inc, listingParamsMasterKeysV0ToV1,
FILTER_END, FILTER_ACCEPT, FILTER_SKIP } from './tools';
import { VersioningConstants as VSConst } from '../../versioning/constants';
const { DbPrefixes, BucketVersioningKeyFormat } = VSConst;
/**
@ -14,10 +14,41 @@ const { DbPrefixes, BucketVersioningKeyFormat } = VSConst;
* @param {Number} delimiterIndex - 'folder' index in the path
* @return {String} - CommonPrefix
*/
function getCommonPrefix(key, delimiter, delimiterIndex) {
function getCommonPrefix(
key: string, delimiter: string, delimiterIndex: number
): string {
return key.substring(0, delimiterIndex + delimiter.length);
}
interface DelimiterParams {
delimiter: string;
prefix: string;
marker: string;
maxKeys: number;
v2: boolean;
startAfter: string;
continuationToken: string;
alphabeticalOrder: boolean;
}
interface DelimiterContentItem {
key: string;
value: string;
}
interface DelimiterResult {
CommonPrefixes: string[];
Contents: DelimiterContentItem[]; // TODO type this.Contents,
IsTruncated: boolean;
Delimiter: string;
NextMarker?: any; // TODO type
NextContinuationToken?: any; // TODO type
}
/**
* Handle object listing with parameters
*
@ -55,7 +86,25 @@ class Delimiter extends Extension {
* request
* @param {String} [vFormat] - versioning key format
*/
constructor(parameters, logger, vFormat) {
delimiter: string;
prefix: string;
marker: string;
maxKeys: number;
startAfter: string;
continuationToken: string;
alphabeticalOrder: boolean;
vFormat: string;
CommonPrefixes: string[];
Contents: DelimiterContentItem[];
IsTruncated: boolean;
NextMarker: string;
NextContinuationToken: string;
startMarker: string;
continueMarker: string;
nextContinueMarker: string;
constructor(parameters: DelimiterParams, logger: any, vFormat: string) {
super(parameters, logger);
// original listing parameters
this.delimiter = parameters.delimiter;
@ -134,7 +183,7 @@ class Delimiter extends Extension {
* final state of the result if it is the case
* @return {Boolean} - indicates if the iteration has to stop
*/
_reachedMaxKeys() {
_reachedMaxKeys(): boolean {
if (this.keys >= this.maxKeys) {
// In cases of maxKeys <= 0 -> IsTruncated = false
this.IsTruncated = this.maxKeys > 0;
@ -151,7 +200,7 @@ class Delimiter extends Extension {
* @param {String} value - The value of the key
* @return {number} - indicates if iteration should continue
*/
addContents(key, value) {
addContents(key: string, value: string): number {
if (this._reachedMaxKeys()) {
return FILTER_END;
}
@ -180,7 +229,7 @@ class Delimiter extends Extension {
* @param {String} obj.value - The value of the element
* @return {number} - indicates if iteration should continue
*/
filter(obj) {
filter(obj): number {
const key = this.getObjectKey(obj);
const value = obj.value;
if ((this.prefix && !key.startsWith(this.prefix))
@ -206,7 +255,7 @@ class Delimiter extends Extension {
* @param {Number} index - after prefix starting point
* @return {Boolean} - indicates if iteration should continue
*/
addCommonPrefix(key, index) {
addCommonPrefix(key: string, index: number) {
const commonPrefix = getCommonPrefix(key, this.delimiter, index);
if (this.CommonPrefixes.indexOf(commonPrefix) === -1
&& this[this.nextContinueMarker] !== commonPrefix) {
@ -228,7 +277,7 @@ class Delimiter extends Extension {
* @return {string} - the present range (NextMarker) if repd believes
* that it's enough and should move on
*/
skippingV0() {
skippingV0(): string {
return this[this.nextContinueMarker];
}
@ -239,7 +288,7 @@ class Delimiter extends Extension {
* @return {string} - the present range (NextMarker) if repd believes
* that it's enough and should move on
*/
skippingV1() {
skippingV1(): string {
return DbPrefixes.Master + this[this.nextContinueMarker];
}
@ -249,12 +298,12 @@ class Delimiter extends Extension {
* isn't truncated
* @return {Object} - following amazon format
*/
result() {
result(): DelimiterResult {
/* NextMarker is only provided when delimiter is used.
* specified in v1 listing documentation
* http://docs.aws.amazon.com/AmazonS3/latest/API/RESTBucketGET.html
*/
const result = {
const result: DelimiterResult = {
CommonPrefixes: this.CommonPrefixes,
Contents: this.Contents,
IsTruncated: this.IsTruncated,
@ -271,4 +320,4 @@ class Delimiter extends Extension {
}
}
module.exports = { Delimiter };
export { Delimiter, DelimiterParams };
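
A rough sketch of the class driven by hand; in Arsenal the entries come from the metadata backend, so the keys and the `as any` cast below are illustrative only:

```ts
import { Delimiter } from './delimiter';

const listing = new Delimiter(
    { delimiter: '/', prefix: 'photos/', maxKeys: 1000 } as any,
    console, 'v0' /* assumed value of BucketVersioningKeyFormat.v0 */);

listing.filter({ key: 'photos/2021/a.jpg', value: '{}' }); // folds into a CommonPrefix
listing.filter({ key: 'photos/index.html', value: '{}' }); // listed under Contents

const res = listing.result();
// res.CommonPrefixes -> ['photos/2021/']
// res.Contents       -> [{ key: 'photos/index.html', value: '{}' }]
```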

View File

@ -1,10 +1,11 @@
'use strict'; // eslint-disable-line strict
const Delimiter = require('./delimiter').Delimiter;
const Version = require('../../versioning/Version').Version;
const VSConst = require('../../versioning/constants').VersioningConstants;
import { Delimiter } from './delimiter';
import type { DelimiterParams } from './delimiter';
import { Version } from '../../versioning/Version';
import { VersioningConstants as VSConst } from '../../versioning/constants';
const { BucketVersioningKeyFormat } = VSConst;
const { FILTER_ACCEPT, FILTER_SKIP, SKIP_NONE } = require('./tools');
import { FILTER_ACCEPT, FILTER_SKIP, SKIP_NONE } from './tools';
const VID_SEP = VSConst.VersionId.Separator;
const { DbPrefixes } = VSConst;
@ -27,7 +28,11 @@ class DelimiterMaster extends Delimiter {
* @param {RequestLogger} logger - The logger of the request
* @param {String} [vFormat] - versioning key format
*/
constructor(parameters, logger, vFormat) {
prvKey?: any; // TODO type
prvPHDKey?: any; // TODO type
inReplayPrefix?: any; // TODO type
constructor(parameters: DelimiterParams, logger: any, vFormat: string) {
super(parameters, logger, vFormat);
// non-PHD master version or a version whose master is a PHD version
this.prvKey = undefined;
@ -58,7 +63,7 @@ class DelimiterMaster extends Delimiter {
* @param {String} obj.value - The value of the element
* @return {number} - indicates if iteration should continue
*/
filterV0(obj) {
filterV0(obj: object): number {
let key = obj.key;
const value = obj.value;
@ -155,14 +160,14 @@ class DelimiterMaster extends Delimiter {
* @param {String} obj.value - The value of the element
* @return {number} - indicates if iteration should continue
*/
filterV1(obj) {
filterV1(obj: object): number {
// Filtering master keys in v1 is simply listing the master
// keys, as the state of version keys do not change the
// result, so we can use Delimiter method directly.
return super.filter(obj);
}
skippingBase() {
skippingBase(): string {
if (this[this.nextContinueMarker]) {
// next marker or next continuation token:
// - foo/ : skipping foo/
@ -177,14 +182,14 @@ class DelimiterMaster extends Delimiter {
return SKIP_NONE;
}
skippingV0() {
skippingV0(): string {
if (this.inReplayPrefix) {
return DbPrefixes.Replay;
}
return this.skippingBase();
}
skippingV1() {
skippingV1(): string {
const skipTo = this.skippingBase();
if (skipTo === SKIP_NONE) {
return SKIP_NONE;
@ -193,4 +198,4 @@ class DelimiterMaster extends Delimiter {
}
}
module.exports = { DelimiterMaster };
export { DelimiterMaster };

View File

@ -1,14 +1,32 @@
'use strict'; // eslint-disable-line strict
const Delimiter = require('./delimiter').Delimiter;
const Version = require('../../versioning/Version').Version;
const VSConst = require('../../versioning/constants').VersioningConstants;
const { inc, FILTER_END, FILTER_ACCEPT, FILTER_SKIP, SKIP_NONE } =
require('./tools');
import { Delimiter } from './delimiter';
import type { DelimiterParams } from './delimiter';
import type { MPUParams } from './MPU';
import { Version } from '../../versioning/Version';
import { VersioningConstants as VSConst } from '../../versioning/constants';
import { inc, FILTER_END, FILTER_ACCEPT, FILTER_SKIP, SKIP_NONE } from './tools';
const VID_SEP = VSConst.VersionId.Separator;
const { DbPrefixes, BucketVersioningKeyFormat } = VSConst;
interface DelimiterVersionsParams extends DelimiterParams {
keyMarker: string; // TODO type
versionIdMarker: any; // TODO type
}
interface DelimiterVersionsResult {
CommonPrefixes: string[];
Versions: any; // TODO type
IsTruncated: boolean;
NextKeyMarker?: any; // TODO type
NextVersionIdMarker?: any; // TODO type
Delimiter: string;
}
/**
* Handle object listing with parameters
*
@ -22,7 +40,15 @@ const { DbPrefixes, BucketVersioningKeyFormat } = VSConst;
* @prop {Number} maxKeys - number of keys to list
*/
class DelimiterVersions extends Delimiter {
constructor(parameters, logger, vFormat) {
keyMarker: string;
versionIdMarker: any;
masterKey?: any; // TODO type
masterVersionId?: any; // TODO type
NextVersionIdMarker: any; // TODO type
inReplayPrefix: boolean;
constructor(parameters: DelimiterVersionsParams, logger: any, vFormat: string) {
super(parameters, logger, vFormat);
// specific to version listing
this.keyMarker = parameters.keyMarker;
@ -49,7 +75,7 @@ class DelimiterVersions extends Delimiter {
}[this.vFormat]);
}
genMDParamsV0() {
genMDParamsV0(): MPUParams {
const params = {};
if (this.parameters.prefix) {
params.gte = this.parameters.prefix;
@ -73,40 +99,41 @@ class DelimiterVersions extends Delimiter {
return params;
}
genMDParamsV1() {
genMDParamsV1(): MPUParams[] {
// return an array of two listing params sets to ask for
// synchronized listing of M and V ranges
const params = [{}, {}];
const mRangeParams: MPUParams = {};
const vRangeParams: MPUParams = {};
if (this.parameters.prefix) {
params[0].gte = DbPrefixes.Master + this.parameters.prefix;
params[0].lt = DbPrefixes.Master + inc(this.parameters.prefix);
params[1].gte = DbPrefixes.Version + this.parameters.prefix;
params[1].lt = DbPrefixes.Version + inc(this.parameters.prefix);
mRangeParams.gte = DbPrefixes.Master + this.parameters.prefix;
mRangeParams.lt = DbPrefixes.Master + inc(this.parameters.prefix);
vRangeParams.gte = DbPrefixes.Version + this.parameters.prefix;
vRangeParams.lt = DbPrefixes.Version + inc(this.parameters.prefix);
} else {
params[0].gte = DbPrefixes.Master;
params[0].lt = inc(DbPrefixes.Master); // stop after the last master key
params[1].gte = DbPrefixes.Version;
params[1].lt = inc(DbPrefixes.Version); // stop after the last version key
mRangeParams.gte = DbPrefixes.Master;
mRangeParams.lt = inc(DbPrefixes.Master); // stop after the last master key
vRangeParams.gte = DbPrefixes.Version;
vRangeParams.lt = inc(DbPrefixes.Version); // stop after the last version key
}
if (this.parameters.keyMarker) {
if (params[1].gte <= DbPrefixes.Version + this.parameters.keyMarker) {
delete params[0].gte;
delete params[1].gte;
params[0].gt = DbPrefixes.Master + inc(this.parameters.keyMarker + VID_SEP);
if (vRangeParams.gte <= DbPrefixes.Version + this.parameters.keyMarker) {
delete mRangeParams.gte;
delete vRangeParams.gte;
mRangeParams.gt = DbPrefixes.Master + inc(this.parameters.keyMarker + VID_SEP);
if (this.parameters.versionIdMarker) {
// versionIdMarker should always come with keyMarker
// but may not be the other way around
params[1].gt = DbPrefixes.Version
vRangeParams.gt = DbPrefixes.Version
+ this.parameters.keyMarker
+ VID_SEP
+ this.parameters.versionIdMarker;
} else {
params[1].gt = DbPrefixes.Version
vRangeParams.gt = DbPrefixes.Version
+ inc(this.parameters.keyMarker + VID_SEP);
}
}
}
return params;
return [mRangeParams, vRangeParams];
}
/**
@ -120,7 +147,7 @@ class DelimiterVersions extends Delimiter {
* * -1 if master key < version key
* * 1 if master key > version key
*/
compareObjects(masterObj, versionObj) {
compareObjects(masterObj: object, versionObj: object): number {
const masterKey = masterObj.key.slice(DbPrefixes.Master.length);
const versionKey = versionObj.key.slice(DbPrefixes.Version.length);
return masterKey < versionKey ? -1 : 1;
@ -136,7 +163,7 @@ class DelimiterVersions extends Delimiter {
* @param {String} obj.value - The value of the key
* @return {Boolean} - indicates if iteration should continue
*/
addContents(obj) {
addContents(obj: object): boolean {
if (this._reachedMaxKeys()) {
return FILTER_END;
}
@ -163,7 +190,7 @@ class DelimiterVersions extends Delimiter {
* @param {String} obj.value - The value of the element
* @return {number} - indicates if iteration should continue
*/
filterV0(obj) {
filterV0(obj: object): number {
if (obj.key.startsWith(DbPrefixes.Replay)) {
this.inReplayPrefix = true;
return FILTER_SKIP;
@ -189,7 +216,7 @@ class DelimiterVersions extends Delimiter {
* @param {String} obj.value - The value of the element
* @return {number} - indicates if iteration should continue
*/
filterV1(obj) {
filterV1(obj: object): number {
// this function receives both M and V keys, but their prefix
// length is the same so we can remove their prefix without
// looking at the type of key
@ -197,7 +224,7 @@ class DelimiterVersions extends Delimiter {
obj.value);
}
filterCommon(key, value) {
filterCommon(key: string, value: string): boolean {
if (this.prefix && !key.startsWith(this.prefix)) {
return FILTER_SKIP;
}
@ -230,7 +257,7 @@ class DelimiterVersions extends Delimiter {
return this.addContents({ key: nonversionedKey, value, versionId });
}
skippingV0() {
skippingV0(): string {
if (this.inReplayPrefix) {
return DbPrefixes.Replay;
}
@ -243,7 +270,7 @@ class DelimiterVersions extends Delimiter {
return SKIP_NONE;
}
skippingV1() {
skippingV1(): string {
const skipV0 = this.skippingV0();
if (skipV0 === SKIP_NONE) {
return SKIP_NONE;
@ -259,7 +286,7 @@ class DelimiterVersions extends Delimiter {
* isn't truncated
* @return {Object} - following amazon format
*/
result() {
result(): DelimiterVersionsResult {
/* NextMarker is only provided when delimiter is used.
* specified in v1 listing documentation
* http://docs.aws.amazon.com/AmazonS3/latest/API/RESTBucketGET.html
@ -276,4 +303,4 @@ class DelimiterVersions extends Delimiter {
}
}
module.exports = { DelimiterVersions };
export { DelimiterVersions };
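
To make the refactored genMDParamsV1 concrete, here is the pair of range-parameter sets it produces for a simple prefix, worked out from the code above:

```ts
import { inc } from './tools';
import { VersioningConstants as VSConst } from '../../versioning/constants';

const { Master, Version } = VSConst.DbPrefixes;
// For parameters { prefix: 'foo/' } and no keyMarker, the method returns:
const expected = [
    { gte: `${Master}foo/`, lt: `${Master}${inc('foo/')}` },   // master key range
    { gte: `${Version}foo/`, lt: `${Version}${inc('foo/')}` }, // version key range
];
// inc('foo/') === 'foo0': '/' is bumped to the next char code, '0', giving an
// exclusive upper bound just past every key sharing the prefix.
```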

View File

@ -1,9 +0,0 @@
module.exports = {
Basic: require('./basic').List,
Delimiter: require('./delimiter').Delimiter,
DelimiterVersions: require('./delimiterVersions')
.DelimiterVersions,
DelimiterMaster: require('./delimiterMaster')
.DelimiterMaster,
MPU: require('./MPU').MultipartUploads,
};

View File

@ -0,0 +1,13 @@
import { List as Basic } from './basic';
import { Delimiter } from './delimiter';
import { DelimiterVersions } from './delimiterVersions';
import { DelimiterMaster } from './delimiterMaster';
import { MultipartUploads as MPU } from './MPU';
export {
Basic,
Delimiter,
DelimiterVersions,
DelimiterMaster,
MPU,
};

View File

@ -1,10 +1,15 @@
const assert = require('assert');
import { strict as assert } from 'assert';
const { FILTER_END, FILTER_SKIP, SKIP_NONE } = require('./tools');
import { FILTER_END, FILTER_SKIP, SKIP_NONE } from './tools';
const MAX_STREAK_LENGTH = 100;
interface SkipParams {
extension: any;
gte: any;
}
/**
* Handle the filtering and the skip mechanism of a listing result.
*/
@ -15,14 +20,23 @@ class Skip {
* @param {String} params.gte - current range gte (greater than or
* equal) used by the client code
*/
constructor(params) {
extension: any;
gteParams: any;
listingEndCb?: Function;
skipRangeCb?: Function;
streakLength: number;
constructor(params: SkipParams) {
// TODO - once we're in strict TS everywhere, we no longer need these
// assertions
assert(params.extension);
this.extension = params.extension;
this.gteParams = params.gte;
this.listingEndCb = null;
this.skipRangeCb = null;
this.listingEndCb = undefined;
this.skipRangeCb = undefined;
/* Used to count consecutive FILTER_SKIP returned by the extension
* filter method. Once this counter reaches MAX_STREAK_LENGTH, the
@ -31,11 +45,11 @@ class Skip {
this.streakLength = 0;
}
setListingEndCb(cb) {
setListingEndCb(cb: Function) {
this.listingEndCb = cb;
}
setSkipRangeCb(cb) {
setSkipRangeCb(cb: Function) {
this.skipRangeCb = cb;
}
@ -47,9 +61,9 @@ class Skip {
* This function calls the listing end or the skip range callbacks if
* needed.
*/
filter(entry) {
assert(this.listingEndCb);
assert(this.skipRangeCb);
filter(entry: object): undefined {
assert(this.listingEndCb !== undefined);
assert(this.skipRangeCb !== undefined);
const filteringResult = this.extension.filter(entry);
const skippingRange = this.extension.skipping();
@ -73,7 +87,7 @@ class Skip {
}
}
_inc(str) {
_inc(str: string): string {
if (!str) {
return str;
}
@ -84,5 +98,7 @@ class Skip {
}
}
module.exports = Skip;
export {
Skip,
SkipParams
}
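
A sketch of the driver-side wiring; the callback bodies are stand-ins, since in Arsenal this wiring lives in the metadata backends that stream entries through a listing extension:

```ts
import { Skip } from './skip';
import { List } from './basic';

const extension = new List({ maxKeys: 100 } as any, console);
const skip = new Skip({ extension, gte: undefined });
skip.setListingEndCb(() => { /* stop reading from the entry stream */ });
skip.setSkipRangeCb((newRange: string) => { /* re-open the stream at newRange */ });
// Then, for each { key, value } entry read from the database:
// skip.filter(entry);
```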

View File

@ -1,4 +1,6 @@
const { DbPrefixes } = require('../../versioning/constants').VersioningConstants;
import { VersioningConstants } from '../../versioning/constants';
const DbPrefixes = VersioningConstants.DbPrefixes;
// constants for extensions
const SKIP_NONE = undefined; // to be in line with the values of NextMarker
@ -15,8 +17,8 @@ const FILTER_END = -1;
* @param {Number} limit - The limit to respect
* @return {Number} - The parsed number || limit
*/
function checkLimit(number, limit) {
const parsed = Number.parseInt(number, 10);
function checkLimit(str: string, limit: number): number {
const parsed = Number.parseInt(str, 10);
const valid = !Number.isNaN(parsed) && (!limit || parsed <= limit);
return valid ? parsed : limit;
}
@ -28,7 +30,7 @@ function checkLimit(number, limit) {
* @return {string} - the incremented string
* or the input if it is not valid
*/
function inc(str) {
function inc(str: string): string {
return str ? (str.slice(0, str.length - 1) +
String.fromCharCode(str.charCodeAt(str.length - 1) + 1)) : str;
}
@ -40,7 +42,7 @@ function inc(str) {
* @param {object} v0params - listing parameters for v0 format
* @return {object} - listing parameters for v1 format
*/
function listingParamsMasterKeysV0ToV1(v0params) {
function listingParamsMasterKeysV0ToV1(v0params: any): any {
const v1params = Object.assign({}, v0params);
if (v0params.gt !== undefined) {
v1params.gt = `${DbPrefixes.Master}${v0params.gt}`;
@ -59,7 +61,7 @@ function listingParamsMasterKeysV0ToV1(v0params) {
return v1params;
}
module.exports = {
export {
checkLimit,
inc,
listingParamsMasterKeysV0ToV1,
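
The helpers being retyped here, in action:

```ts
import { inc, checkLimit } from './tools';

inc('foo');               // 'fop': last char code bumped by one; used to build
                          // exclusive upper bounds when scanning key ranges
checkLimit('500', 1000);  // 500: parses and is within the limit
checkLimit('oops', 1000); // 1000: NaN falls back to the limit
checkLimit('5000', 1000); // 1000: values above the limit are capped to it
```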

View File

@ -91,7 +91,7 @@ class Vault {
requestContext: serializedRCsArr,
},
(err, userInfo) => vaultSignatureCb(err, userInfo,
params.log, callback)
params.log, callback),
);
}
@ -146,7 +146,7 @@ class Vault {
requestContext: serializedRCs,
},
(err, userInfo) => vaultSignatureCb(err, userInfo,
params.log, callback, streamingV4Params)
params.log, callback, streamingV4Params),
);
}

View File

@ -187,7 +187,7 @@ function generateV4Headers(request, data, accessKey, secretKeyValue,
.filter(headerName =>
headerName.startsWith('x-amz-')
|| headerName.startsWith('x-scal-')
|| headerName === 'host'
|| headerName === 'host',
).sort().join(';');
const params = { request, signedHeaders, payloadChecksum,
credentialScope, timestamp, query: data,

View File

@ -29,7 +29,7 @@ class ChainBackend extends BaseBackend {
typeof client.getCanonicalIds === 'function' &&
typeof client.getEmailAddresses === 'function' &&
typeof client.checkPolicies === 'function' &&
typeof client.healthcheck === 'function'
typeof client.healthcheck === 'function',
), 'invalid client: missing required auth backend methods');
this._clients = clients;
}
@ -55,7 +55,7 @@ class ChainBackend extends BaseBackend {
signatureFromRequest,
accessKey,
options,
done
done,
), callback);
}
@ -67,7 +67,7 @@ class ChainBackend extends BaseBackend {
region,
scopeDate,
options,
done
done,
), callback);
}
@ -151,7 +151,7 @@ class ChainBackend extends BaseBackend {
requestContextParams,
userArn,
options,
done
done,
), (err, res) => {
if (err) {
return callback(err);
@ -169,7 +169,7 @@ class ChainBackend extends BaseBackend {
client.healthcheck(reqUid, (err, res) => done(null, {
error: !!err ? err : null,
status: res,
})
}),
), (err, res) => {
if (err) {
return callback(err);

View File

@ -273,7 +273,7 @@ class V4Transform extends Transform {
}
// get next chunk
return callback();
}
},
);
}
}

View File

@ -1,5 +1,6 @@
'use strict'; // eslint-disable-line strict
const crypto = require('crypto');
import { createHash } from 'crypto';
// The min value here is to manage further backward compat if we
// need it
@ -10,7 +11,7 @@ const iamSecurityTokenPattern =
new RegExp(`^[a-f0-9]{${iamSecurityTokenSizeMin},` +
`${iamSecurityTokenSizeMax}}$`);
module.exports = {
export default {
// info about the iam security token
iamSecurityToken: {
min: iamSecurityTokenSizeMin,
@ -92,7 +93,7 @@ module.exports = {
replicationBackends: { aws_s3: true, azure: true, gcp: true },
// hex digest of sha256 hash of empty string:
emptyStringHash: crypto.createHash('sha256')
emptyStringHash: createHash('sha256')
.update('', 'binary').digest('hex'),
mpuMDStoredExternallyBackend: { aws_s3: true, gcp: true },
// AWS sets a minimum size limit for parts except for the last part.

lib/db.js (deleted, 182 lines)
View File

@ -1,182 +0,0 @@
'use strict'; // eslint-disable-line strict
const writeOptions = { sync: true };
/**
* Like Error, but with a property set to true.
* TODO: this is copied from kineticlib, should consolidate with the
* future errors module
*
* Example: instead of:
* const err = new Error("input is not a buffer");
* err.badTypeInput = true;
* throw err;
* use:
* throw propError("badTypeInput", "input is not a buffer");
*
* @param {String} propName - the property name.
* @param {String} message - the Error message.
* @returns {Error} the Error object.
*/
function propError(propName, message) {
const err = new Error(message);
err[propName] = true;
return err;
}
/**
* Running transaction with multiple updates to be committed atomically
*/
class IndexTransaction {
/**
* Builds a new transaction
*
* @argument {Leveldb} db an open database to which the updates
* will be applied
*
* @returns {IndexTransaction} a new empty transaction
*/
constructor(db) {
this.operations = [];
this.db = db;
this.closed = false;
this.conditions = [];
}
/**
* Adds a new operation to participate in this running transaction
*
* @argument {object} op an object with the following attributes:
* {
* type: 'put' or 'del',
* key: the object key,
* value: (optional for del) the value to store,
* }
*
* @throws {Error} an error described by the following properties
* - invalidTransactionVerb if op is not put or del
* - pushOnCommittedTransaction if already committed
* - missingKey if the key is missing from the op
* - missingValue if putting without a value
*
* @returns {undefined}
*/
push(op) {
if (this.closed) {
throw propError('pushOnCommittedTransaction',
'can not add ops to already committed transaction');
}
if (op.type !== 'put' && op.type !== 'del') {
throw propError('invalidTransactionVerb',
`unknown action type: ${op.type}`);
}
if (op.key === undefined) {
throw propError('missingKey', 'missing key');
}
if (op.type === 'put' && op.value === undefined) {
throw propError('missingValue', 'missing value');
}
this.operations.push(op);
}
/**
* Adds a new put operation to this running transaction
*
* @argument {string} key - the key of the object to put
* @argument {string} value - the value to put
*
* @throws {Error} an error described by the following properties
* - pushOnCommittedTransaction if already committed
* - missingKey if the key is missing from the op
* - missingValue if putting without a value
*
* @returns {undefined}
*
* @see push
*/
put(key, value) {
this.push({ type: 'put', key, value });
}
/**
* Adds a new del operation to this running transaction
*
* @argument {string} key - the key of the object to delete
*
* @throws {Error} an error described by the following properties
* - pushOnCommittedTransaction if already committed
* - missingKey if the key is missing from the op
*
* @returns {undefined}
*
* @see push
*/
del(key) {
this.push({ type: 'del', key });
}
/**
* Adds a condition for the transaction
*
* @argument {object} condition an object with the following attributes:
* {
* <condition>: the object key
* }
* example: { notExists: 'key1' }
*
* @throws {Error} an error described by the following properties
* - pushOnCommittedTransaction if already committed
* - missingCondition if the condition is empty
*
* @returns {undefined}
*/
addCondition(condition) {
if (this.closed) {
throw propError('pushOnCommittedTransaction',
'can not add conditions to already committed transaction');
}
if (condition === undefined || Object.keys(condition).length === 0) {
throw propError('missingCondition', 'missing condition for conditional put');
}
if (typeof (condition.notExists) !== 'string') {
throw propError('unsupportedConditionalOperation', 'missing key or supported condition');
}
this.conditions.push(condition);
}
/**
* Applies the queued updates in this transaction atomically.
*
* @argument {function} cb function to be called when the commit
* finishes, taking an optional error argument
*
* @returns {undefined}
*/
commit(cb) {
if (this.closed) {
return cb(propError('alreadyCommitted',
'transaction was already committed'));
}
if (this.operations.length === 0) {
return cb(propError('emptyTransaction',
'tried to commit an empty transaction'));
}
this.closed = true;
writeOptions.conditions = this.conditions;
// The array-of-operations variant of the `batch` method
// allows passing options such as `sync: true` whereas the
// chained form does not.
return this.db.batch(this.operations, writeOptions, cb);
}
}
module.exports = {
IndexTransaction,
};
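
For the record, the usage pattern the removed module supported, reconstructed from its own doc comments (`db` stands for an open leveldb handle, support for which this changeset deprecates):

```ts
declare const db: any; // an open leveldb database
const { IndexTransaction } = require('./db');

const txn = new IndexTransaction(db);
txn.put('key1', 'value1');
txn.del('key2');
txn.addCondition({ notExists: 'key1' });
txn.commit((err: any) => {
    if (err) {
        // e.g. err.alreadyCommitted or err.emptyTransaction is set to true
    }
});
```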

View File

@ -1,4 +1,4 @@
function reshapeExceptionError(error) {
export function reshapeExceptionError(error: any) {
const { message, code, stack, name } = error;
return {
message,
@ -7,7 +7,3 @@ function reshapeExceptionError(error) {
name,
};
}
module.exports = {
reshapeExceptionError,
};

View File

@ -100,6 +100,8 @@ class ArsenalError extends Error {
const errors = ArsenalError.errorMap
export type { ArsenalError };
export default {
...errors
};

View File

@ -1,6 +1,8 @@
'use strict'; // eslint-disable-line
const debug = require('util').debuglog('jsutil');
import { debuglog } from 'util';
const debug = debuglog('jsutil');
// JavaScript utility functions
@ -17,9 +19,9 @@ const debug = require('util').debuglog('jsutil');
* @return {function} a callable wrapper mirroring <tt>func</tt> but
* only calls <tt>func</tt> at first invocation.
*/
module.exports.once = function once(func) {
export function once(func: Function): Function {
const state = { called: false, res: undefined };
return function wrapper(...args) {
return function wrapper(...args: any) {
if (!state.called) {
state.called = true;
state.res = func.apply(func, args);
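
Per the contract in the doc comment, the wrapper runs the function once and then keeps returning the first result (a quick sketch):

```ts
import { once } from './jsutil';

const connect = once((url: string) => {
    console.log(`connecting to ${url}`);
    return { url };
});
connect('s3://bucket'); // runs the function and caches its result
connect('elsewhere');   // does not run again; yields the cached result
```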

View File

@ -17,7 +17,7 @@ class RedisClient {
method: 'RedisClient.constructor',
redisHost: config.host,
redisPort: config.port,
})
}),
);
return this;
}

View File

@ -9,7 +9,6 @@ const StatsClient = require('./StatsClient');
* rather than by seconds
*/
class StatsModel extends StatsClient {
/**
* Utility method to convert 2d array rows to columns, and vice versa
* See also: https://docs.ruby-lang.org/en/2.0.0/Array.html#method-i-zip

View File

@ -1126,7 +1126,7 @@ class LifecycleConfiguration {
`<NoncurrentDays>${noncurrentDays}` +
'</NoncurrentDays>',
`<StorageClass>${storageClass}</StorageClass>`,
`</${actionName}>`
`</${actionName}>`,
);
});
Action = xml.join('');
@ -1146,7 +1146,7 @@ class LifecycleConfiguration {
`<${actionName}>`,
element,
`<StorageClass>${storageClass}</StorageClass>`,
`</${actionName}>`
`</${actionName}>`,
);
});
Action = xml.join('');

View File

@ -27,7 +27,7 @@ const errors = require('../errors');
* </NotificationConfiguration>
*/
/**
/**
* Format of config:
*
* config = {

View File

@ -17,7 +17,7 @@ const errors = require('../errors');
* </ObjectLockConfiguration>
*/
/**
/**
* Format of config:
*
* config = {

View File

@ -10,7 +10,6 @@ const ObjectMDLocation = require('./ObjectMDLocation');
* mpuPart metadata for example)
*/
class ObjectMD {
/**
* Create a new instance of ObjectMD. Parameter <tt>objMd</tt> is
* reserved for internal use, users should call

View File

@ -3,7 +3,6 @@
* 'location' array
*/
class ObjectMDLocation {
/**
* @constructor
* @param {object} locationObj - single data location info

View File

@ -10,7 +10,6 @@ const { checkSupportIPv6 } = require('./utils');
class Server {
/**
* @constructor
*

View File

@ -342,8 +342,6 @@ class KMIP {
return cb(null, response);
});
}
}

View File

@ -26,7 +26,7 @@ function sendError(res, log, error, optMessage) {
httpCode: error.code,
errorType: error.message,
error: message,
}
},
);
res.writeHead(error.code);
res.end(JSON.stringify({

View File

@ -45,7 +45,6 @@ function sendError(res, log, error, optMessage) {
* start() to start listening to the configured port.
*/
class RESTServer extends httpServer {
/**
* @constructor
* @param {Object} params - constructor params

View File

@ -17,7 +17,6 @@ const rpc = require('./rpc.js');
* RPC client object accessing the sub-level transparently.
*/
class LevelDbClient extends rpc.BaseClient {
/**
* @constructor
*
@ -78,7 +77,6 @@ class LevelDbClient extends rpc.BaseClient {
* env.subDb (env is passed as first parameter of received RPC calls).
*/
class LevelDbService extends rpc.BaseService {
/**
* @constructor
*

View File

@ -37,7 +37,6 @@ let streamRPCJSONObj;
* an error occurred).
*/
class BaseClient extends EventEmitter {
/**
* @constructor
*
@ -251,7 +250,6 @@ class BaseClient extends EventEmitter {
*
*/
class BaseService {
/**
* @constructor
*

View File

@ -1,9 +1,10 @@
'use strict'; // eslint-disable-line strict
const Ajv = require('ajv');
const userPolicySchema = require('./userPolicySchema');
const resourcePolicySchema = require('./resourcePolicySchema');
const errors = require('../errors');
import Ajv from 'ajv';
import * as userPolicySchema from './userPolicySchema.json';
import * as resourcePolicySchema from './resourcePolicySchema.json';
import errors from '../errors';
import type { ArsenalError } from '../errors';
const ajValidate = new Ajv({ allErrors: true });
ajValidate.addMetaSchema(require('ajv/lib/refs/json-schema-draft-06.json'));
@ -27,7 +28,7 @@ const errDict = {
};
// parse ajv errors and return early with the first relevant error
function _parseErrors(ajvErrors, policyType) {
function _parseErrors(ajvErrors: Ajv.ErrorObject[], policyType: string) {
let parsedErr;
if (policyType === 'user') {
// deep copy is needed as we have to assign custom error description
@ -67,7 +68,7 @@ function _parseErrors(ajvErrors, policyType) {
}
// parse JSON safely without throwing an exception
function _safeJSONParse(s) {
function _safeJSONParse(s: string): object {
try {
return JSON.parse(s);
} catch (e) {
@ -75,9 +76,20 @@ function _safeJSONParse(s) {
}
}
/**
* @typedef ValidationResult
* @type Object
* @property {Array|null} error - list of validation errors or null
* @property {Bool} valid - true/false depending on the validation result
*/
interface ValidationResult {
error: ArsenalError;
valid: boolean;
}
// validates policy using the validation schema
function _validatePolicy(type, policy) {
if (type === 'user') {
function _validatePolicy(policyType: string, policy: string): ValidationResult {
if (policyType === 'user') {
const parseRes = _safeJSONParse(policy);
if (parseRes instanceof Error) {
return { error: Object.assign({}, errors.MalformedPolicyDocument),
@ -90,7 +102,7 @@ function _validatePolicy(type, policy) {
}
return { error: null, valid: true };
}
if (type === 'resource') {
if (policyType === 'resource') {
const parseRes = _safeJSONParse(policy);
if (parseRes instanceof Error) {
return { error: Object.assign({}, errors.MalformedPolicy),
@ -105,19 +117,14 @@ function _validatePolicy(type, policy) {
}
return { error: errors.NotImplemented, valid: false };
}
/**
* @typedef ValidationResult
* @type Object
* @property {Array|null} error - list of validation errors or null
* @property {Bool} valid - true/false depending on the validation result
*/
/**
* Validates user policy
* @param {String} policy - policy json
* @returns {Object} - returns object with properties error and value
* @returns {ValidationResult} - result of the validation
*/
function validateUserPolicy(policy) {
function validateUserPolicy(policy: string): ValidationResult {
return _validatePolicy('user', policy);
}
@ -127,11 +134,11 @@ function validateUserPolicy(policy) {
* @returns {Object} - returns object with properties error and value
* @returns {ValidationResult} - result of the validation
*/
function validateResourcePolicy(policy) {
function validateResourcePolicy(policy: string): ValidationResult {
return _validatePolicy('resource', policy);
}
module.exports = {
export {
validateUserPolicy,
validateResourcePolicy,
};
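
A usage sketch of the retyped validators; the policy document below is hypothetical but shaped like what the user-policy schema expects:

```ts
import { validateUserPolicy } from './policyValidator';

const policy = JSON.stringify({
    Version: '2012-10-17',
    Statement: [{ Effect: 'Allow', Action: 's3:GetObject', Resource: '*' }],
});

const { error, valid } = validateUserPolicy(policy);
// valid === true with error === null on success; otherwise `error` is an
// ArsenalError such as MalformedPolicyDocument, as mapped by _parseErrors.
```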

View File

@ -51,10 +51,10 @@ wildcards.handleWildcardInResource = arn => {
// Wildcards can be part of the resource ARN.
// Wildcards do NOT span segments of the ARN (separated by ":")
// Example: all elements in specific bucket:
// "Resource": "arn:aws:s3:::my_corporate_bucket/*"
// ARN format:
// arn:partition:service:region:namespace:relative-id
// Example: all elements in specific bucket:
// "Resource": "arn:aws:s3:::my_corporate_bucket/*"
// ARN format:
// arn:partition:service:region:namespace:relative-id
const arnArr = arn.split(':');
return arnArr.map(portion => wildcards.handleWildcards(portion));
};
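As the comments above note, wildcard expansion is applied to each ':'-separated portion independently; a hedged sketch of the resulting behavior:

    // Sketch: each ':' segment is converted on its own, so a '*' in the
    // relative-id never matches across the region or namespace segments.
    const parts = wildcards.handleWildcardInResource(
        'arn:aws:s3:::my_corporate_bucket/*');
    // parts has 6 entries; parts[5] covers 'my_corporate_bucket/<anything>'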

View File

@ -6,7 +6,6 @@ const crypto = require('crypto');
* data through a stream
*/
class MD5Sum extends Transform {
/**
* @constructor
*/
@ -40,7 +39,6 @@ class MD5Sum extends Transform {
this.emit('hashed');
callback(null);
}
}
module.exports = MD5Sum;
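A usage sketch for the stream above; the completedHash property name is an assumption and is not shown in this hunk:

    const fs = require('fs');
    const MD5Sum = require('./MD5Sum');

    const hasher = new MD5Sum();
    hasher.on('hashed', () => {
        // fired from _flush() once the piped data has been fully consumed
        console.log(hasher.completedHash); // assumed property holding the digest
    });
    fs.createReadStream('./some-file').pipe(hasher).resume();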

View File

@ -73,7 +73,7 @@ class ResultsCollector extends EventEmitter {
* @property {Error} [results[].error] - error returned by Azure putting subpart
* @property {number} results[].subPartIndex - index of the subpart
*/
/**
/**
* "error" event
* @event ResultCollector#error
* @type {(Error|undefined)} error - error returned by Azure last subpart

View File

@ -94,7 +94,7 @@ azureMpuUtils.getSubPartIds = (part, uploadId) =>
azureMpuUtils.getBlockId(uploadId, part.partNumber, subPartIndex));
azureMpuUtils.putSinglePart = (errorWrapperFn, request, params, dataStoreName,
log, cb) => {
log, cb) => {
const { bucketName, partNumber, size, objectKey, contentMD5, uploadId }
= params;
const blockId = azureMpuUtils.getBlockId(uploadId, partNumber, 0);
@ -121,7 +121,7 @@ log, cb) => {
return cb(errors.BadDigest);
}
return cb(errors.InternalError.customizeDescription(
`Error returned from Azure: ${err.message}`)
`Error returned from Azure: ${err.message}`),
);
}
const md5 = result.headers['content-md5'] || '';
@ -131,7 +131,7 @@ log, cb) => {
};
azureMpuUtils.putNextSubPart = (errorWrapperFn, partParams, subPartInfo,
subPartStream, subPartIndex, resultsCollector, log, cb) => {
subPartStream, subPartIndex, resultsCollector, log, cb) => {
const { uploadId, partNumber, bucketName, objectKey } = partParams;
const subPartSize = azureMpuUtils.getSubPartSize(
subPartInfo, subPartIndex);
@ -144,7 +144,7 @@ subPartStream, subPartIndex, resultsCollector, log, cb) => {
};
azureMpuUtils.putSubParts = (errorWrapperFn, request, params,
dataStoreName, log, cb) => {
dataStoreName, log, cb) => {
const subPartInfo = azureMpuUtils.getSubPartInfo(params.size);
const resultsCollector = new ResultsCollector();
const hashedStream = new MD5Sum();

View File

@ -33,7 +33,7 @@ convertMethods.listMultipartUploads = xmlParams => {
xml.push('<?xml version="1.0" encoding="UTF-8"?>',
'<ListMultipartUploadsResult ' +
'xmlns="http://s3.amazonaws.com/doc/2006-03-01/">',
`<Bucket>${escapeForXml(xmlParams.bucketName)}</Bucket>`
`<Bucket>${escapeForXml(xmlParams.bucketName)}</Bucket>`,
);
// For certain XML elements, if it is `undefined`, AWS returns either an
@ -58,7 +58,7 @@ convertMethods.listMultipartUploads = xmlParams => {
});
xml.push(`<MaxUploads>${escapeForXml(l.MaxKeys)}</MaxUploads>`,
`<IsTruncated>${escapeForXml(l.IsTruncated)}</IsTruncated>`
`<IsTruncated>${escapeForXml(l.IsTruncated)}</IsTruncated>`,
);
l.Uploads.forEach(upload => {
@ -84,14 +84,14 @@ convertMethods.listMultipartUploads = xmlParams => {
`<StorageClass>${escapeForXml(val.StorageClass)}` +
'</StorageClass>',
`<Initiated>${escapeForXml(val.Initiated)}</Initiated>`,
'</Upload>'
'</Upload>',
);
});
l.CommonPrefixes.forEach(prefix => {
xml.push('<CommonPrefixes>',
`<Prefix>${escapeForXml(prefix)}</Prefix>`,
'</CommonPrefixes>'
'</CommonPrefixes>',
);
});

View File

@ -5,7 +5,6 @@ const Readable = require('stream').Readable;
* This class is used to produce zero-filled buffers for a reader's consumption
*/
class NullStream extends Readable {
/**
* Construct a new zeros filled buffers producer that will
* produce as much bytes as specified by the range parameter, or the size

View File

@ -110,7 +110,7 @@ function generateMpuPartStorageInfo(filteredPartList) {
* and extraPartLocations
*/
function validateAndFilterMpuParts(storedParts, jsonList, mpuOverviewKey,
splitter, log) {
splitter, log) {
let storedPartsCopy = [];
const filteredPartsObj = {};
filteredPartsObj.partList = [];

View File

@ -2,7 +2,7 @@ const errors = require('../../errors');
const routesUtils = require('../routesUtils');
function routerGET(request, response, api, log, statsClient,
dataRetrievalParams) {
dataRetrievalParams) {
log.debug('routing request', { method: 'routerGET' });
if (request.bucketName === undefined && request.objectKey !== undefined) {
routesUtils.responseXMLBody(errors.NoSuchBucket, null, response, log);

View File

@ -118,7 +118,7 @@ const XMLResponseBackend = {
`<Message>${errCode.description}</Message>`,
'<Resource></Resource>',
`<RequestId>${log.getSerializedUids()}</RequestId>`,
'</Error>'
'</Error>',
);
const xmlStr = xml.join('');
const bytesSent = Buffer.byteLength(xmlStr);
@ -377,7 +377,7 @@ function retrieveData(locations, retrieveDataParams, response, log) {
// call end for all cases (error/success) per node.js docs
// recommendation
response.end();
}
},
);
}
@ -592,7 +592,7 @@ const routesUtils = {
`<h1>${err.code} ${response.statusMessage}</h1>`,
'<ul>',
`<li>Code: ${err.message}</li>`,
`<li>Message: ${err.description}</li>`
`<li>Message: ${err.description}</li>`,
);
if (!userErrorPageFailure && bucketName) {
@ -602,7 +602,7 @@ const routesUtils = {
`<li>RequestId: ${log.getSerializedUids()}</li>`,
// AWS response contains HostId here.
// TODO: consider adding
'</ul>'
'</ul>',
);
if (userErrorPageFailure) {
html.push(
@ -612,13 +612,13 @@ const routesUtils = {
'<ul>',
`<li>Code: ${err.message}</li>`,
`<li>Message: ${err.description}</li>`,
'</ul>'
'</ul>',
);
}
html.push(
'<hr/>',
'</body>',
'</html>'
'</html>',
);
return response.end(html.join(''), 'utf8', () => {
@ -848,7 +848,7 @@ const routesUtils = {
return bucketName;
}
throw new Error(
`bad request: hostname ${host} is not in valid endpoints`
`bad request: hostname ${host} is not in valid endpoints`,
);
},

View File

@ -1,6 +1,6 @@
'use strict'; // eslint-disable-line strict
const randomBytes = require('crypto').randomBytes;
import { randomBytes } from 'crypto';
/*
* This set of functions allows us to create an efficient shuffle
@ -18,13 +18,13 @@ const randomBytes = require('crypto').randomBytes;
* @return {number} the lowest number of bits
* @throws Error if number < 0
*/
function bitsNeeded(number) {
if (number < 0) {
function bitsNeeded(num: number): number {
if (num < 0) {
throw new Error('Input must be greater than or equal to zero');
} else if (number === 0) {
} else if (num === 0) {
return 1;
} else {
return Math.floor(Math.log2(number)) + 1;
return Math.floor(Math.log2(num)) + 1;
}
}
@ -36,7 +36,7 @@ function bitsNeeded(number) {
* if numbits === 0
* @throws Error if numBits < 0
*/
function createMaskOnes(numBits) {
function createMaskOnes(numBits: number): number {
if (numBits < 0) {
throw new Error('Input must be greater than or equal to zero');
}
@ -50,7 +50,7 @@ function createMaskOnes(numBits) {
* @return {Buffer} a buffer with 'numBytes' pseudo-random bytes.
* @throws Error if numBytes < 0 or if insufficient entropy
*/
function nextBytes(numBytes) {
function nextBytes(numBytes: number): Buffer {
if (numBytes < 0) {
throw new Error('Input must be greater than or equal to zero');
}
@ -67,7 +67,7 @@ function nextBytes(numBytes) {
* @return {number} the number of bytes needed
* @throws Error if numBits < 0
*/
function bitsToBytes(numBits) {
function bitsToBytes(numBits: number): number {
if (numBits < 0) {
throw new Error('Input must be greater than or equal to zero');
}
@ -83,7 +83,7 @@ function bitsToBytes(numBits) {
* @return {number} - a pseudo-random integer in [min,max]
* @throws Error if max < min
*/
function randomRange(min, max) {
function randomRange(min: number, max: number): number {
if (max < min) {
throw new Error('Invalid range');
}
@ -98,7 +98,7 @@ function randomRange(min, max) {
// we use a mask as an optimization: it increases the chances for the
// candidate to be in range
const mask = createMaskOnes(bits);
let candidate;
let candidate: number;
do {
candidate = parseInt(nextBytes(bytes).toString('hex'), 16) & mask;
} while (candidate > range);
@ -111,7 +111,7 @@ function randomRange(min, max) {
* @param {Array} array - Any type of array
* @return {Array} - The sorted array
*/
module.exports = function shuffle(array) {
export default function shuffle<T>(array: T[]): T[] {
for (let i = array.length - 1; i > 0; i--) {
const randIndex = randomRange(0, i);
/* eslint-disable no-param-reassign */

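A quick sketch of the newly typed export in use; shuffle works in place and returns the same array reference:

    import shuffle from './shuffle';

    const deck = [1, 2, 3, 4, 5];
    const same = shuffle(deck); // Fisher-Yates backed by crypto.randomBytes
    // same === deck; the generic keeps number[] as the return type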
View File

@ -11,7 +11,7 @@ const { createLogger, logHelper, removeQuotes, trimXMetaPrefix } =
const missingVerIdInternalError = errors.InternalError.customizeDescription(
'Invalid state. Please ensure versioning is enabled ' +
'in AWS for the location constraint and try again.'
'in AWS for the location constraint and try again.',
);
class AwsClient {
@ -94,7 +94,7 @@ class AwsClient {
err, this._dataStoreName, this.clientType);
return callback(errors.ServiceUnavailable
.customizeDescription('Error returned from ' +
`${this.type}: ${err.message}`)
`${this.type}: ${err.message}`),
);
}
if (!data.VersionId && this._supportsVersioning) {
@ -233,7 +233,7 @@ class AwsClient {
}
return callback(errors.ServiceUnavailable
.customizeDescription('Error returned from ' +
`${this.type}: ${err.message}`)
`${this.type}: ${err.message}`),
);
}
return callback();
@ -307,7 +307,7 @@ class AwsClient {
err, this._dataStoreName, this.clientType);
return callback(errors.ServiceUnavailable
.customizeDescription('Error returned from ' +
`${this.type}: ${err.message}`)
`${this.type}: ${err.message}`),
);
}
return callback(null, mpuResObj);
@ -335,7 +335,7 @@ class AwsClient {
'on uploadPart', err, this._dataStoreName, this.clientType);
return callback(errors.ServiceUnavailable
.customizeDescription('Error returned from ' +
`${this.type}: ${err.message}`)
`${this.type}: ${err.message}`),
);
}
// Because we manually add quotes to ETag later, remove quotes here
@ -363,7 +363,7 @@ class AwsClient {
err, this._dataStoreName, this.clientType);
return callback(errors.ServiceUnavailable
.customizeDescription('Error returned from ' +
`${this.type}: ${err.message}`)
`${this.type}: ${err.message}`),
);
}
// build storedParts object to mimic Scality S3 backend returns
@ -435,7 +435,7 @@ class AwsClient {
'completeMPU', err, this._dataStoreName, this.clientType);
return callback(errors.ServiceUnavailable
.customizeDescription('Error returned from ' +
`${this.type}: ${err.message}`)
`${this.type}: ${err.message}`),
);
}
if (!completeMpuRes.VersionId && this._supportsVersioning) {
@ -453,7 +453,7 @@ class AwsClient {
'headObject', err, this._dataStoreName, this.clientType);
return callback(errors.ServiceUnavailable
.customizeDescription('Error returned from ' +
`${this.type}: ${err.message}`)
`${this.type}: ${err.message}`),
);
}
// remove quotes from eTag because they're added later
@ -481,7 +481,7 @@ class AwsClient {
this.clientType);
return callback(errors.ServiceUnavailable
.customizeDescription('Error returned from ' +
`${this.type}: ${err.message}`)
`${this.type}: ${err.message}`),
);
}
return callback();
@ -510,7 +510,7 @@ class AwsClient {
this._dataStoreName, this.clientType);
return callback(errors.ServiceUnavailable
.customizeDescription('Error returned from ' +
`${this.type}: ${err.message}`)
`${this.type}: ${err.message}`),
);
}
return callback();
@ -533,7 +533,7 @@ class AwsClient {
this._dataStoreName, this.clientType);
return callback(errors.ServiceUnavailable
.customizeDescription('Error returned from ' +
`${this.type}: ${err.message}`)
`${this.type}: ${err.message}`),
);
}
return callback();
@ -570,14 +570,14 @@ class AwsClient {
this._dataStoreName, this.clientType);
return callback(errors.AccessDenied
.customizeDescription('Error: Unable to access ' +
`${sourceAwsBucketName} ${this.type} bucket`)
`${sourceAwsBucketName} ${this.type} bucket`),
);
}
logHelper(log, 'error', 'error from data backend on ' +
'copyObject', err, this._dataStoreName, this.clientType);
return callback(errors.ServiceUnavailable
.customizeDescription('Error returned from ' +
`${this.type}: ${err.message}`)
`${this.type}: ${err.message}`),
);
}
if (!copyResult.VersionId && this._supportsVersioning) {
@ -629,14 +629,14 @@ class AwsClient {
this._dataStoreName, this.clientType);
return callback(errors.AccessDenied
.customizeDescription('Error: Unable to access ' +
`${sourceAwsBucketName} AWS bucket`)
`${sourceAwsBucketName} AWS bucket`),
);
}
logHelper(log, 'error', 'error from data backend on ' +
'uploadPartCopy', err, this._dataStoreName, this.clientType);
return callback(errors.ServiceUnavailable
.customizeDescription('Error returned from ' +
`${this.type}: ${err.message}`)
`${this.type}: ${err.message}`),
);
}
const eTag = removeQuotes(res.CopyPartResult.ETag);

View File

@ -422,14 +422,14 @@ class AzureClient {
this._dataStoreName);
return callback(errors.AccessDenied
.customizeDescription('Error: Unable to access ' +
`${sourceContainerName} Azure Container`)
`${sourceContainerName} Azure Container`),
);
}
logHelper(log, 'error', 'error from data backend on ' +
'copyObject', err, this._dataStoreName);
return callback(errors.ServiceUnavailable
.customizeDescription('Error returned from ' +
`AWS: ${err.message}`)
`AWS: ${err.message}`),
);
}
if (res.copy.status === 'pending') {
@ -443,12 +443,12 @@ class AzureClient {
'on abortCopyBlob', err, this._dataStoreName);
return callback(errors.ServiceUnavailable
.customizeDescription('Error returned from ' +
`AWS on abortCopyBlob: ${err.message}`)
`AWS on abortCopyBlob: ${err.message}`),
);
}
return callback(errors.InvalidObjectState
.customizeDescription('Error: Azure copy status was ' +
'pending. It has been aborted successfully')
'pending. It has been aborted successfully'),
);
});
}

View File

@ -123,7 +123,7 @@ class GcpClient extends AwsClient {
err, this._dataStoreName, this.clientType);
return callback(errors.ServiceUnavailable
.customizeDescription('Error returned from ' +
`GCP: ${err.message}`)
`GCP: ${err.message}`),
);
}
return callback(null, mpuResObj);
@ -168,7 +168,7 @@ class GcpClient extends AwsClient {
'completeMPU', err, this._dataStoreName, this.clientType);
return callback(errors.ServiceUnavailable
.customizeDescription('Error returned from ' +
`GCP: ${err.message}`)
`GCP: ${err.message}`),
);
}
if (!completeMpuRes.VersionId) {
@ -210,7 +210,7 @@ class GcpClient extends AwsClient {
'on uploadPart', err, this._dataStoreName, this.clientType);
return callback(errors.ServiceUnavailable
.customizeDescription('Error returned from ' +
`GCP: ${err.message}`)
`GCP: ${err.message}`),
);
}
// remove quotes from eTag because they're added later
@ -242,7 +242,7 @@ class GcpClient extends AwsClient {
if (copySourceRange) {
return callback(errors.NotImplemented
.customizeDescription('Error returned from ' +
`${this.clientType}: copySourceRange not implemented`)
`${this.clientType}: copySourceRange not implemented`),
);
}
@ -261,14 +261,14 @@ class GcpClient extends AwsClient {
this._dataStoreName, this.clientType);
return callback(errors.AccessDenied
.customizeDescription('Error: Unable to access ' +
`${sourceGcpBucketName} GCP bucket`)
`${sourceGcpBucketName} GCP bucket`),
);
}
logHelper(log, 'error', 'error from data backend on ' +
'uploadPartCopy', err, this._dataStoreName);
return callback(errors.ServiceUnavailable
.customizeDescription('Error returned from ' +
`GCP: ${err.message}`)
`GCP: ${err.message}`),
);
}
// remove quotes from eTag because they're added later
@ -291,7 +291,7 @@ class GcpClient extends AwsClient {
'on abortMPU', err, this._dataStoreName, this.clientType);
return callback(errors.ServiceUnavailable
.customizeDescription('Error returned from ' +
`GCP: ${err.message}`)
`GCP: ${err.message}`),
);
}
return callback();

View File

@ -45,7 +45,7 @@ class PfsClient {
}
return callback(null, keyContext.objectKey, '',
keyContext.metaHeaders['x-amz-meta-size'],
md5
md5,
);
}
logHelper(log, 'error', 'Not implemented', errors.NotImplemented,

View File

@ -35,7 +35,6 @@ const FOLDER_HASH = 3511;
* directory hash structure under the configured dataPath.
*/
class DataFileStore {
/**
* @constructor
* @param {Object} dataConfig - configuration of the file backend

View File

@ -30,7 +30,6 @@ class ListRecordStream extends stream.Transform {
* @classdesc Proxy object to access raft log API
*/
class LogConsumer {
/**
* @constructor
*

View File

@ -14,13 +14,13 @@ const _operatorType1 = joi.string().valid(
'$gt',
'$gte',
'$lt',
'$lte'
'$lte',
);
// supports strings, numbers, and boolean
const _operatorType2 = joi.string().valid(
'$eq',
'$ne'
'$ne',
);
const _valueType1 = joi.alternatives([

View File

@ -17,7 +17,6 @@ const METASTORE = '__metastore';
const itemScanRefreshDelay = 1000 * 30 * 60; // 30 minutes
class BucketFileInterface {
/**
* @constructor
* @param {object} [params] - constructor params

View File

@ -8,7 +8,6 @@ const { RecordLogProxy } = require('./RecordLog.js');
const werelogs = require('werelogs');
class MetadataFileClient {
/**
* Construct a metadata client
*

View File

@ -25,7 +25,6 @@ const SYNC_OPTIONS = { sync: true };
const SUBLEVEL_SEP = '::';
class MetadataFileServer {
/**
* Construct a metadata server
*

View File

@ -18,7 +18,6 @@ const DEFAULT_RECORD_LOG_NAME = 's3-recordlog';
* object.
*/
class RecordLogProxy extends rpc.BaseClient {
constructor(params) {
super(params);
@ -102,7 +101,6 @@ class ListRecordStream extends stream.Transform {
* updates can be transactional with each other.
*/
class RecordLogService extends rpc.BaseService {
/**
* @constructor
*

View File

@ -9,7 +9,6 @@ const MongoUtils = require('./utils');
* @classdesc Class to consume mongo oplog
*/
class LogConsumer {
/**
* @constructor
*

View File

@ -538,7 +538,7 @@ class MongoClientInterface {
updateOne: {
// eslint-disable-next-line
filter: {
_id: objName,
'_id': objName,
'value.versionId': params.versionId,
},
update: {
@ -607,7 +607,7 @@ class MongoClientInterface {
MongoUtils.serialize(mstObjVal);
// eslint-disable-next-line
c.update({
_id: objName,
'_id': objName,
'value.versionId': {
// We break the semantic correctness here with
// $gte instead of $gt because we do not have
@ -760,7 +760,7 @@ class MongoClientInterface {
MongoUtils.serialize(objVal);
// eslint-disable-next-line
c.findOneAndReplace({
_id: objName,
'_id': objName,
'value.isPHD': true,
'value.versionId': mst.versionId,
}, {
@ -822,7 +822,7 @@ class MongoClientInterface {
// version:
// eslint-disable-next-line
c.findOneAndDelete({
_id: objName,
'_id': objName,
'value.isPHD': true,
'value.versionId': mst.versionId,
}, {}, err => {
@ -1616,7 +1616,7 @@ class MongoClientInterface {
const retResult = this._handleResults(collRes, isVer);
retResult.stalled = stalledCount;
return callback(null, retResult);
}
},
);
}

View File

@ -11,7 +11,6 @@ const requiresOneWorker = {
};
class Server {
/**
* Create a new Metadata Proxy Server instance
*

View File

@ -6,7 +6,7 @@
* @param {String} str - The string to compute the hash of
* @return {Number} The computed hash
*/
function stringHash(str) {
function stringHash(str: string): number {
let hash = 5381;
let i = str.length;
@ -22,4 +22,4 @@ function stringHash(str) {
return hash >>> 0;
}
module.exports = stringHash;
export default stringHash;
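A sketch of the typed hash in use; the bucket count is an illustrative value:

    import stringHash from './stringHash';

    // djb2-style hash (seed 5381), returned as a 32-bit unsigned integer
    const bucket = stringHash('object-key') % 16; // 16 buckets, made up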

View File

@ -160,7 +160,7 @@ class TestMatrix {
const result = Object.keys(matrixChild.params)
.every(currentKey =>
Object.prototype.toString.call(
matrixChild.params[currentKey]
matrixChild.params[currentKey],
).indexOf('Array') === -1);
if (result === true) {

View File

@ -1,6 +1,16 @@
'use strict'; // eslint-disable-line strict
const VID_SEP = require('./constants').VersioningConstants.VersionId.Separator;
import { VersioningConstants } from './constants';
const VID_SEP = VersioningConstants.VersionId.Separator;
interface VersionContents {
isNull?: boolean;
isDeleteMarker?: boolean;
isPHD?: boolean; // used by isPHDVersion() and generatePHDVersion()
versionId?: string;
otherInfo?: any; // TODO type
}
/**
* Class for manipulating an object version.
@ -12,6 +22,9 @@ const VID_SEP = require('./constants').VersioningConstants.VersionId.Separator;
* use with a production setup with Metadata).
*/
class Version {
version: VersionContents; // TODO type
/**
* Create a new version instantiation from its data object.
* @param {object} version - the data object to instantiate
@ -20,7 +33,7 @@ class Version {
* @param {string} version.versionId - the version id
* @constructor
*/
constructor(version) {
constructor(version: VersionContents) {
this.version = version || {};
}
@ -30,7 +43,7 @@ class Version {
* @param {string} value - the string to parse
* @return {Version} - the version deserialized from the input string
*/
static from(value) {
static from(value: string): Version {
return new Version(value ? JSON.parse(value) : undefined);
}
@ -40,7 +53,7 @@ class Version {
* @param {string} value - version to check
* @return {boolean} - whether this is a PHD version
*/
static isPHD(value) {
static isPHD(value: string): boolean {
// check if the input is a valid version
if (!value) {
return false;
@ -67,7 +80,7 @@ class Version {
* @param {string} versionId - versionId of the PHD version
* @return {string} - the serialized version
*/
static generatePHDVersion(versionId) {
static generatePHDVersion(versionId: string): string {
return `{ "isPHD": true, "versionId": "${versionId}" }`;
}
@ -79,7 +92,7 @@ class Version {
* @param {string} versionId - the versionId to append
* @return {string} - the object with versionId appended
*/
static appendVersionId(value, versionId) {
static appendVersionId(value: string, versionId: string): string {
// assuming value has the format of '{...}'
let index = value.length - 2;
while (value.charAt(index--) === ' ');
@ -93,7 +106,7 @@ class Version {
*
* @return {boolean} - whether this is a PHD version
*/
isPHDVersion() {
isPHDVersion(): boolean {
return this.version.isPHD || false;
}
@ -102,7 +115,7 @@ class Version {
*
* @return {boolean} - stating if the value is a null version
*/
isNullVersion() {
isNullVersion(): boolean {
return this.version.isNull;
}
@ -112,7 +125,7 @@ class Version {
* @param {string} value - the stringified object to check
* @return {boolean} - if the object is a delete marker
*/
static isDeleteMarker(value) {
static isDeleteMarker(value: string): boolean {
const index = value.indexOf('isDeleteMarker');
if (index < 0) {
return false;
@ -130,7 +143,7 @@ class Version {
*
* @return {boolean} - stating if the value is a delete marker
*/
isDeleteMarkerVersion() {
isDeleteMarkerVersion(): boolean {
return this.version.isDeleteMarker;
}
@ -139,7 +152,7 @@ class Version {
*
* @return {string} - the versionId
*/
getVersionId() {
getVersionId(): string {
return this.version.versionId;
}
@ -149,7 +162,7 @@ class Version {
* @param {string} versionId - the versionId
* @return {Version} - the updated version
*/
setVersionId(versionId) {
setVersionId(versionId: string): Version {
this.version.versionId = versionId;
return this;
}
@ -159,7 +172,7 @@ class Version {
*
* @return {Version} - the updated version
*/
setDeleteMarker() {
setDeleteMarker(): Version {
this.version.isDeleteMarker = true;
return this;
}
@ -169,7 +182,7 @@ class Version {
*
* @return {Version} - the updated version
*/
setNullVersion() {
setNullVersion(): Version {
this.version.isNull = true;
return this;
}
@ -179,14 +192,15 @@ class Version {
*
* @return {string} - the serialized version
*/
toString() {
toString(): string {
return JSON.stringify(this.version);
}
}
// TODO: add a type; key can be an array, a string, ...
function isMasterKey(key) {
return !key.includes(VID_SEP);
}
module.exports = { Version, isMasterKey };
export { Version, isMasterKey };
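A short sketch of the migrated exports; the values are illustrative:

    import { Version, isMasterKey } from './Version';

    const v = Version.from('{"versionId":"1234"}');
    v.setDeleteMarker(); // setters are chainable and return the Version
    v.toString();        // '{"versionId":"1234","isDeleteMarker":true}'

    isMasterKey('mykey');           // true: no version id separator in the key
    isMasterKey('mykey\u00001234'); // false: '\u0000' is VID_SEP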

View File

@ -6,9 +6,11 @@
// - rep_group_id 07 bytes replication group identifier
// - other_information arbitrary user input, such as a unique string
const base62Integer = require('base62');
import * as base62 from 'base62';
const BASE62 = '0123456789abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ';
const base62String = require('base-x')(BASE62);
import base from 'base-x';
const base62String = base(BASE62);
// the lengths of the components in bytes
const LENGTH_TS = 14; // timestamp: epoch in ms
@ -178,8 +180,8 @@ function base62Encode(str) {
const part1 = Number(str.substring(0, B62V_HALF));
const part2 = Number(str.substring(B62V_HALF, B62V_TOTAL));
const part3 = Buffer.from(str.substring(B62V_TOTAL));
const enc1 = base62Integer.encode(part1);
const enc2 = base62Integer.encode(part2);
const enc1 = base62.encode(part1);
const enc2 = base62.encode(part2);
const enc3 = base62String.encode(part3);
return (B62V_EPAD + enc1).slice(-B62V_EPAD.length) +
(B62V_EPAD + enc2).slice(-B62V_EPAD.length) +
@ -197,10 +199,10 @@ function base62Decode(str) {
try {
let start = 0;
const enc1 = str.substring(start, start + B62V_EPAD.length);
const orig1 = base62Integer.decode(enc1);
const orig1 = base62.decode(enc1);
start += B62V_EPAD.length;
const enc2 = str.substring(start, start + B62V_EPAD.length);
const orig2 = base62Integer.decode(enc2);
const orig2 = base62.decode(enc2);
start += B62V_EPAD.length;
const enc3 = str.substring(start);
const orig3 = base62String.decode(enc3);
@ -246,8 +248,15 @@ function decode(str) {
return hexDecode(str);
}
module.exports = { generateVersionId, getInfVid,
hexEncode, hexDecode,
base62Encode, base62Decode,
encode, decode,
ENC_TYPE_HEX, ENC_TYPE_BASE62 };
export {
generateVersionId,
getInfVid,
hexEncode,
hexDecode,
base62Encode,
base62Decode,
encode,
decode,
ENC_TYPE_HEX,
ENC_TYPE_BASE62,
};
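A sketch of the re-exported API; the replication group id below is a made-up value:

    import { generateVersionId, encode, decode } from './VersionID';

    const vid = generateVersionId('unique-info', 'RG001  '); // 7-byte group id
    const external = encode(vid); // compact form for use outside Metadata
    decode(external);             // expected to round-trip to the raw id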

View File

@ -1,10 +1,12 @@
const errors = require('../errors');
const Version = require('./Version').Version;
const genVID = require('./VersionID').generateVersionId;
import errors from '../errors';
import { Version } from './Version';
import type WriteCache from './WriteCache';
import type WriteGatheringManager from './WriteGatheringManager';
import { generateVersionId as genVID } from './VersionID';
// some predefined constants
const VID_SEP = require('./constants').VersioningConstants.VersionId.Separator;
import { VersioningConstants } from './constants';
const VID_SEP = VersioningConstants.VersionId.Separator;
/**
* Increment the charCode of the last character of a valid string.
@ -12,7 +14,7 @@ const VID_SEP = require('./constants').VersioningConstants.VersionId.Separator;
* @param {string} prefix - the input string
* @return {string} - the incremented string, or the input if it is not valid
*/
function getPrefixUpperBoundary(prefix) {
function getPrefixUpperBoundary(prefix: string): string {
if (prefix) {
return prefix.slice(0, prefix.length - 1) +
String.fromCharCode(prefix.charCodeAt(prefix.length - 1) + 1);
@ -20,18 +22,34 @@ function getPrefixUpperBoundary(prefix) {
return prefix;
}
function formatVersionKey(key, versionId) {
function formatVersionKey(key: string, versionId: string): string {
return `${key}${VID_SEP}${versionId}`;
}
function formatCacheKey(db, key) {
function formatCacheKey(db: string, key: string): string {
// using double VID_SEP to make sure the cache key is unique
return `${db}${VID_SEP}${VID_SEP}${key}`;
}
const VID_SEPPLUS = getPrefixUpperBoundary(VID_SEP);
interface RepairHints {
type: string;
value: string;
nextValue: string;
}
class VersioningRequestProcessor {
writeCache: WriteCache;
wgm: WriteGatheringManager;
replicationGroupId: any; // TODO
uidCounter: number;
queue: object;
repairing: object;
/**
* This class takes a random string generator as additional input.
* @param {WriteCache} writeCache - the WriteCache to which this
@ -43,7 +61,11 @@ class VersioningRequestProcessor {
* @param {string} versioning.replicationGroupId - replication group id
* @constructor
*/
constructor(writeCache, writeGatheringManager, versioning) {
constructor(
writeCache: WriteCache,
writeGatheringManager: WriteGatheringManager,
versioning: object,
) {
this.writeCache = writeCache;
this.wgm = writeGatheringManager;
this.replicationGroupId = versioning.replicationGroupId;
@ -69,13 +91,13 @@ class VersioningRequestProcessor {
* @param {function} callback - callback function
* @return {any} - to finish the call
*/
get(request, logger, callback) {
get(request: object, logger: object, callback: Function) {
const { db, key, options } = request;
if (options && options.versionId) {
const versionKey = formatVersionKey(key, options.versionId);
return this.wgm.get({ db, key: versionKey }, logger, callback);
}
return this.wgm.get(request, logger, (err, data) => {
return this.wgm.get(request, logger, (err: Error, data) => {
if (err) {
return callback(err);
}
@ -103,7 +125,7 @@ class VersioningRequestProcessor {
* @param {function} callback - callback function
* @return {any} - to finish the call
*/
getByListing(request, logger, callback) {
getByListing(request: object, logger: object, callback: Function): any {
// enqueue the get entry; do nothing if another is processing it
// this is to manage the number of expensive listings when there
// are multiple concurrent gets on the same key which is a PHD version
@ -156,7 +178,7 @@ class VersioningRequestProcessor {
* @param {function} callback - callback function
* @return {boolean} - this request is the first in the queue or not
*/
enqueueGet(request, logger, callback) {
enqueueGet(request: object, logger: object, callback: Function): boolean {
const cacheKey = formatCacheKey(request.db, request.key);
// enqueue the get entry if another is processing it
if (this.queue[cacheKey]) {
@ -179,7 +201,7 @@ class VersioningRequestProcessor {
* @param {string} value - resulting value of the first request
* @return {undefined}
*/
dequeueGet(request, err, value) {
dequeueGet(request: object, err: object, value: string): undefined {
const cacheKey = formatCacheKey(request.db, request.key);
if (this.queue[cacheKey]) {
this.queue[cacheKey].forEach(entry => {
@ -208,7 +230,7 @@ class VersioningRequestProcessor {
(for 'put')
* @return {any} - to finish the call
*/
repairMaster(request, logger, hints) {
repairMaster(request: object, logger: object, hints: RepairHints): any {
const { db, key } = request;
logger.info('start repair process', { request });
this.writeCache.get({ db, key }, logger, (err, value) => {
@ -248,7 +270,7 @@ class VersioningRequestProcessor {
* @param {function} callback - expect callback(err, data)
* @return {any} - to finish the call
*/
put(request, logger, callback) {
put(request: object, logger: object, callback: Function): any {
const { db, key, value, options } = request;
// valid combinations of versioning options:
// - !versioning && !versionId: normal non-versioning put
@ -318,7 +340,7 @@ class VersioningRequestProcessor {
* @param {function} callback - expect callback(err, batch, versionId)
* @return {any} - to finish the call
*/
processVersionSpecificPut(request, logger, callback) {
processVersionSpecificPut(request: object, logger: object, callback: Function): any {
const { db, key } = request;
// versionId is empty: update the master version
if (request.options.versionId === '') {
@ -347,7 +369,7 @@ class VersioningRequestProcessor {
}
del(request, logger, callback) {
del(request: object, logger: object, callback: Function) {
const { db, key, options } = request;
// no versioning or versioning configuration off
if (!(options && options.versionId)) {
@ -379,7 +401,9 @@ class VersioningRequestProcessor {
* @param {function} callback - expect callback(err, batch, versionId)
* @return {any} - to finish the call
*/
processVersionSpecificDelete(request, logger, callback) {
processVersionSpecificDelete(
request: object, logger: object, callback: Function
): any {
const { db, key, options } = request;
// deleting a specific version
this.writeCache.get({ db, key }, logger, (err, data) => {
@ -406,4 +430,4 @@ class VersioningRequestProcessor {
}
}
module.exports = VersioningRequestProcessor;
export default VersioningRequestProcessor;
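For context, a comment sketch of the module-private key helpers typed at the top of this file:

    // getPrefixUpperBoundary('foo') -> 'fop': last charCode bumped by one,
    //                                  an exclusive upper bound for listings
    // formatVersionKey('key', 'v1') -> 'key\u0000v1' (VID_SEP is '\u0000')
    // formatCacheKey('db', 'key')   -> 'db\u0000\u0000key': the double
    //                                  VID_SEP keeps cache keys unambiguous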

View File

@ -1,6 +1,7 @@
'use strict'; // eslint-disable-line
const errors = require('../errors');
import errors from '../errors';
import type WriteGatheringManager from './WriteGatheringManager';
function formatCacheKey(db, key) {
return `${db}\0\0${key}`;
@ -25,7 +26,13 @@ function formatCacheKey(db, key) {
* remains only until the write is done and no other update is using it.
*/
class WriteCache {
constructor(wgm) {
wgm: WriteGatheringManager;
cache: object;
queue: object;
counter: number;
constructor(wgm: WriteGatheringManager) {
this.wgm = wgm;
// internal state
this.cache = {};
@ -43,7 +50,7 @@ class WriteCache {
* @param {function} callback - callback function: callback(error, value)
* @return {any} - to finish the call
*/
get(request, logger, callback) {
get(request: object, logger: object, callback: Function): any {
const { db, key } = request;
const cacheKey = formatCacheKey(db, key);
@ -60,7 +67,7 @@ class WriteCache {
return null;
}
// no other is in progress, get the key from the database
return this.wgm.get(request, logger, (err, value) => {
return this.wgm.get(request, logger, (err: Error, value: any) => {
// answer all the queued requests
this._dequeue(cacheKey, signature, err, value);
});
@ -75,7 +82,7 @@ class WriteCache {
* entry in the queue (which will do the get from the
* database), undefined otherwise
*/
_enqueue(cacheKey, callback) {
_enqueue(cacheKey: string, callback: Function) {
if (this.queue[cacheKey]) {
this.queue[cacheKey].queue.push(callback);
return undefined;
@ -94,7 +101,9 @@ class WriteCache {
* @param {boolean} force - force dequeuing even on signature mismatch
* @return {undefined} - nothing
*/
_dequeue(cacheKey, signature, err, value, force = false) {
_dequeue(
cacheKey: string, signature: number, err: object, value: string, force = false
): undefined {
if (this.queue[cacheKey] === undefined) {
return;
}
@ -139,7 +148,7 @@ class WriteCache {
* @param {function} callback - asynchronous callback of the call
* @return {undefined}
*/
batch(request, logger, callback) {
batch(request: object, logger: object, callback: Function): undefined {
const { db, array } = request;
const signature = this._cacheWrite(db, array);
this.wgm.batch(request, logger, (err, data) => {
@ -159,7 +168,7 @@ class WriteCache {
* @param {object} array - batch operation to apply on the database
* @return {number} - signature of the request
*/
_cacheWrite(db, array) {
_cacheWrite(db: string, array: object): number {
const signature = this.counter++;
array.forEach(entry => {
const cacheKey = formatCacheKey(db, entry.key);
@ -177,7 +186,7 @@ class WriteCache {
* @param {number} signature - signature if temporarily cached
* @return {undefined}
*/
_cacheClear(db, array, signature) {
_cacheClear(db: string, array: object, signature: number): undefined {
array.forEach(entry => {
const key = formatCacheKey(db, entry.key);
if (this.cache[key] && this.cache[key].signature === signature) {
@ -190,4 +199,4 @@ class WriteCache {
}
}
module.exports = WriteCache;
export default WriteCache;
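A hedged sketch of the class above; db and logger are assumed stand-ins for a real backend and logger, not names from this diff:

    import WriteGatheringManager from './WriteGatheringManager';
    import WriteCache from './WriteCache';

    const cache = new WriteCache(new WriteGatheringManager(db)); // db assumed
    cache.get({ db: 'bucket', key: 'obj' }, logger, (err, value) => {});
    cache.get({ db: 'bucket', key: 'obj' }, logger, (err, value) => {});
    // the second get is queued and answered by the first call's read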

View File

@ -6,7 +6,11 @@ const WG_TIMEOUT = 5; // batching period in milliseconds
* from operations targeting the same database.
*/
class WriteGatheringManager {
constructor(db) {
db: any; // TODO type
dbState: object; // TODO type
constructor(db: any) {
this.db = db;
this.dbState = {};
}
@ -21,11 +25,11 @@ class WriteGatheringManager {
* @param {function} callback - callback function: callback(error, value)
* @return {any} - to finish the call
*/
get(request, logger, callback) {
get(request: object, logger: object, callback: Function): any {
return this.db.get(request, logger, callback);
}
list(request, logger, callback) {
list(request: object, logger: object, callback: Function) {
return this.db.list(request, logger, callback);
}
@ -39,7 +43,7 @@ class WriteGatheringManager {
* @param {function} callback - callback(err)
* @return {WriteGatheringManager} - return this
*/
batch(request, logger, callback) {
batch(request: object, logger: object, callback: Function): WriteGatheringManager {
const { db, array } = request;
if (this.dbState[db] === undefined) {
this.dbState[db] = { db, isCommitting: false };
@ -55,7 +59,7 @@ class WriteGatheringManager {
};
}
const bCache = dbState.batchCache;
array.forEach((entry, index) => {
array.forEach((entry: any, index: number) => {
bCache.batch.push(entry);
bCache.uids.push(logger.getSerializedUids());
bCache.callback.push(index ? null : callback);
@ -69,7 +73,7 @@ class WriteGatheringManager {
* @param {string} db - Name of the database
* @return {any} - to finish the call
*/
_commitBatch(db) {
_commitBatch(db: string): any {
const dbState = this.dbState[db];
const bCache = dbState.batchCache;
// do nothing if no batch to replicate
@ -124,7 +128,7 @@ class WriteGatheringManager {
* @param {object} batch - the committed batch
* @return {undefined} - nothing
*/
_batchCommitted(error, batch) {
_batchCommitted(error: object, batch: object): undefined {
batch.callback.forEach(callback => {
if (callback) {
callback(error);
@ -133,4 +137,4 @@ class WriteGatheringManager {
}
}
module.exports = WriteGatheringManager;
export default WriteGatheringManager;
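A hedged sketch of the batching behavior; wgm and logger are assumed to exist already:

    // two batches against the same database inside the 5 ms WG_TIMEOUT window
    // are gathered and committed as a single underlying write
    wgm.batch({ db: 'bucket', array: [{ type: 'put', key: 'a', value: '1' }] },
        logger, err => {});
    wgm.batch({ db: 'bucket', array: [{ type: 'del', key: 'b' }] },
        logger, err => {});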

View File

@ -1,4 +1,4 @@
module.exports.VersioningConstants = {
const VersioningConstants = {
VersionId: {
Separator: '\0',
},
@ -16,3 +16,5 @@ module.exports.VersioningConstants = {
v1: 'v1',
},
};
export { VersioningConstants };

View File

@ -59,9 +59,10 @@
"@types/jest": "^27.0.3",
"@types/node": "^16.11.7",
"babel-plugin-add-module-exports": "^1.0.4",
"eslint": "2.13.1",
"eslint": "^7.32.0",
"eslint-config-airbnb": "6.2.0",
"eslint-config-scality": "scality/Guidelines#ec33dfb",
"eslint-plugin-jest": "^24.7.0",
"eslint-plugin-react": "^4.3.0",
"jest": "^27.4.5",
"mocha": "8.0.1",
@ -76,14 +77,15 @@
"lint_md": "mdlint $(git ls-files '*.md')",
"lint_yml": "yamllint $(git ls-files '*.yml')",
"test": "jest tests/unit",
"ft_test": "find tests/functional -name \"*.js\" | grep -v \"utils/\" | xargs mocha --timeout 120000 --exit",
"ft_test": "jest tests/functional --testTimeout=120000 --forceExit",
"coverage": "yarn coverage_unit && yarn coverage_ft && yarn coverage_report",
"coverage_unit": "nyc --silent yarn test",
"coverage_ft": "nyc --silent --no-clean yarn ft_test",
"coverage_unit": "yarn test --coverage",
"coverage_ft": "yarn ft_test --coverage",
"coverage_report": "nyc report --all --reporter=text-summary --reporter=lcov",
"compile": "tsc"
},
"jest": {
"collectCoverage": true,
"maxWorkers": 1,
"roots": [
".",
@ -94,7 +96,9 @@
"/node_modules/",
"helpers?\\.js",
"Mock.*\\.js",
"Dummy.*\\.js"
"Dummy.*\\.js",
"tests/functional/\\.*/utils\\.js",
"tests/functional/\\.*/utils/.*\\.js"
],
"testMatch": [
"**/*.js"

View File

@ -41,7 +41,7 @@ describe('KMIP Low Level Driver', () => {
return done(err);
}
const responsePayload = response.lookup(
'Response Message/Batch Item/Response Payload'
'Response Message/Batch Item/Response Payload',
)[0];
assert.deepStrictEqual(responsePayload,
requestPayload);

View File

@ -7,7 +7,7 @@ const { logger } = require('../../utils/kmip/ersatz.js');
describe('KMIP Connection Management', () => {
let server;
before(done => {
beforeAll(done => {
server = net.createServer(conn => {
// abort the connection as soon as it is accepted
conn.destroy();
@ -15,7 +15,7 @@ describe('KMIP Connection Management', () => {
server.listen(5696);
server.on('listening', done);
});
after(done => {
afterAll(done => {
server.close(done);
});

View File

@ -28,7 +28,7 @@ const mongoserver = new MongoMemoryReplSet({
describe('MongoClientInterface', () => {
let metadata;
before(done => {
beforeAll(done => {
mongoserver.waitUntilRunning().then(() => {
const opts = {
mongodb: {
@ -44,7 +44,7 @@ describe('MongoClientInterface', () => {
});
});
after(done => {
afterAll(done => {
async.series([
next => metadata.close(next),
next => mongoserver.stop()

View File

@ -152,8 +152,8 @@ function _deleteObjects(objects, cb) {
}
describe('Basic Metadata Proxy Server test',
function bindToThis() {
this.timeout(10000);
() => {
jest.setTimeout(10000);
it('Should get the metadataInformation', done => {
dispatcher.get('/default/metadataInformation',
(err, response, body) => {
@ -167,8 +167,8 @@ describe('Basic Metadata Proxy Server test',
});
});
describe('Basic Metadata Proxy Server CRUD test', function bindToThis() {
this.timeout(10000);
describe('Basic Metadata Proxy Server CRUD test', () => {
jest.setTimeout(10000);
beforeEach(done => {
dispatcher.post(`/default/bucket/${Bucket}`, bucketInfo,

View File

@ -28,7 +28,7 @@ describe('StatsClient class', () => {
afterEach(() => redisClient.clear(() => {}));
after(() => redisClient.disconnect());
afterAll(() => redisClient.disconnect());
it('should correctly record a new request by default one increment',
done => {

View File

@ -85,7 +85,7 @@ describe('LRUCache', () => {
assert.strictEqual(lru.get(100), undefined);
});
it('max 1000000 entries', function lru1M() {
it('max 1000000 entries', () => {
// this test takes ~1-2 seconds on a laptop, nevertheless set a
// large timeout to reduce the potential of flakiness on possibly
// slower CI environment.

View File

@ -85,7 +85,7 @@ const nonAlphabeticalData = [
const receivedData = data.map(item => ({ key: item.key, value: item.value }));
const receivedNonAlphaData = nonAlphabeticalData.map(
item => ({ key: item.key, value: item.value })
item => ({ key: item.key, value: item.value }),
);
const tests = [

View File

@ -189,7 +189,7 @@ describe('MergeStream', () => {
`${usePauseResume ? ' with pause/resume' : ''}` +
`${errorAtEnd ? ' with error' : ''}`;
it(`${nbEntries} sequential entries${fixtureDesc}`,
function bigMergeSequential(done) {
(done) => {
jest.setTimeout(10000);
const stream1 = [];
const stream2 = [];
@ -204,7 +204,7 @@ describe('MergeStream', () => {
stream1, stream2, usePauseResume, errorAtEnd, done);
});
it(`${nbEntries} randomly mingled entries${fixtureDesc}`,
function bigMergeRandom(done) {
(done) => {
jest.setTimeout(10000);
const stream1 = [];
const stream2 = [];

View File

@ -257,7 +257,7 @@ describe('Auth Backend: Chain Backend', () => {
id3: 'email3@test.com',
// id4 should be overwritten
id4: 'email5@test.com',
}
},
);
});
});

View File

@ -1,467 +0,0 @@
'use strict';// eslint-disable-line strict
const assert = require('assert');
const async = require('async');
const leveldb = require('level');
const temp = require('temp');
temp.track();
const db = require('../../index').db;
const errors = require('../../lib/errors');
const IndexTransaction = db.IndexTransaction;
const key1 = 'key1';
const key2 = 'key2';
const key3 = 'key3';
const value1 = 'value1';
const value2 = 'value2';
const value3 = 'value3';
function createDb() {
const indexPath = temp.mkdirSync();
return leveldb(indexPath, { valueEncoding: 'json' });
}
function checkValueInDb(db, k, v, done) {
db.get(k, (err, value) => {
if (err) {
return done(err);
}
if (value === v) {
return done();
}
return done(new Error('values differ'));
});
}
function checkValueNotInDb(db, k, done) {
db.get(k, (err, value) => {
if (!err || (err && !err.notFound)) {
return done(new Error(`value still in db: ${value}`));
}
return done();
});
}
function checkKeyNotExistsInDB(db, key, cb) {
return db.get(key, (err, value) => {
if (err && !err.notFound) {
return cb(err);
}
if (value) {
return cb(errors.PreconditionFailed);
}
return cb();
});
}
class ConditionalLevelDB {
constructor() {
this.db = createDb();
}
batch(operations, writeOptions, cb) {
return async.eachLimit(writeOptions.conditions, 10, (cond, asyncCallback) => {
switch (true) {
case ('notExists' in cond):
checkKeyNotExistsInDB(this.db, cond.notExists, asyncCallback);
break;
default:
asyncCallback(new Error('unsupported conditional operation'));
}
}, err => {
if (err) {
return cb(err);
}
return this.db.batch(operations, writeOptions, cb);
});
}
get client() {
return this.db;
}
}
describe('IndexTransaction', () => {
it('should allow put', done => {
const db = createDb();
const transaction = new IndexTransaction(db);
transaction.push({
type: 'put',
key: 'k',
value: 'v',
});
transaction.commit(err => {
if (err) {
return done(err);
}
return checkValueInDb(db, 'k', 'v', done);
});
});
it('should allow del', done => {
const db = createDb();
const transaction = new IndexTransaction(db);
transaction.push({
type: 'del',
key: 'k',
});
db.put('k', 'v', err => {
if (err) {
return done(err);
}
return transaction.commit(err => {
if (err) {
return done(err);
}
return checkValueNotInDb(db, 'k', done);
});
});
});
it('should commit put and del combined', done => {
const db = createDb();
const transaction = new IndexTransaction(db);
transaction.push({
type: 'del',
key: 'k1',
});
transaction.push({
type: 'put',
key: 'k2',
value: 'v3',
});
function commitTransactionAndCheck(err) {
if (err) {
return done(err);
}
return transaction.commit(err => {
if (err) {
return done(err);
}
return checkValueNotInDb(db, 'k1', err => {
if (err) {
return done(err);
}
return checkValueInDb(db, 'k2', 'v3', done);
});
});
}
db.batch()
.put('k1', 'v1')
.put('k2', 'v2')
.write(commitTransactionAndCheck);
});
it('should refuse types other than del and put', done => {
const transaction = new IndexTransaction();
function tryPush() {
transaction.push({
type: 'append',
key: 'k',
value: 'v',
});
}
function validateError(err) {
if (err && err.invalidTransactionVerb) {
done();
return true;
}
return done(new Error('should have denied verb append'));
}
assert.throws(tryPush, validateError);
});
it('should refuse put without key', done => {
const transaction = new IndexTransaction();
function tryPush() {
transaction.push({
type: 'put',
value: 'v',
});
}
function validateError(err) {
if (err && err.missingKey) {
done();
return true;
}
return done(new Error('should have detected missing key'));
}
assert.throws(tryPush, validateError);
});
it('should refuse del without key', done => {
const transaction = new IndexTransaction();
function tryPush() {
transaction.push({
type: 'del',
});
}
function validateError(err) {
if (err && err.missingKey) {
done();
return true;
}
return done(new Error('should have detected missing key'));
}
assert.throws(tryPush, validateError);
});
it('should refuse put without value', done => {
const transaction = new IndexTransaction();
function tryPush() {
transaction.push({
type: 'put',
key: 'k',
});
}
function validateError(err) {
if (err && err.missingValue) {
done();
return true;
}
return done(new Error('should have detected missing value'));
}
assert.throws(tryPush, validateError);
});
it('should refuse to commit without any ops', done => {
const transaction = new IndexTransaction();
transaction.commit(err => {
if (err && err.emptyTransaction) {
return done();
}
return done(new Error('allowed to commit an empty transaction'));
});
});
it('should refuse to commit twice', done => {
const transaction = new IndexTransaction(createDb());
transaction.push({
type: 'put',
key: 'k',
value: 'v',
});
function tryCommitAgain(err) {
if (err) {
return done(err);
}
return transaction.commit(err2 => {
if (err2 && err2.alreadyCommitted) {
return done();
}
return done(new Error('allowed to commit twice'));
});
}
transaction.commit(tryCommitAgain);
});
it('should refuse add an op if already committed', done => {
const transaction = new IndexTransaction(createDb());
function push() {
transaction.push({
type: 'put',
key: 'k',
value: 'v',
});
}
function validateError(err) {
if (err && err.pushOnCommittedTransaction) {
done();
return true;
}
return done(new Error());
}
function tryPushAgain(err) {
if (err) {
return done(err);
}
return assert.throws(push, validateError);
}
push();
transaction.commit(tryPushAgain);
});
it('should have a working put shortcut method', done => {
const db = createDb();
const transaction = new IndexTransaction(db);
transaction.put('k', 'v');
transaction.commit(err => {
if (err) {
return done(err);
}
return checkValueInDb(db, 'k', 'v', done);
});
});
it('should have a working del shortcut method', done => {
const db = createDb();
const transaction = new IndexTransaction(db);
transaction.del('k');
db.put('k', 'v', err => {
if (err) {
return done(err);
}
return transaction.commit(err => {
if (err) {
return done(err);
}
return checkValueNotInDb(db, 'k', done);
});
});
});
it('should allow batch operation with notExists condition if key does not exist', done => {
const db = new ConditionalLevelDB();
const { client } = db;
const transaction = new IndexTransaction(db);
transaction.addCondition({ notExists: key1 });
transaction.push({
type: 'put',
key: key1,
value: value1,
});
return async.series([
next => transaction.commit(next),
next => client.get(key1, next),
], (err, res) => {
assert.ifError(err);
assert.strictEqual(res[1], value1);
return done();
});
});
it('should have a working addCondition shortcut method', done => {
const db = new ConditionalLevelDB();
const { client } = db;
const transaction = new IndexTransaction(db);
transaction.put(key1, value1);
transaction.addCondition({ notExists: 'key1' });
transaction.commit(err => {
if (err) {
return done(err);
}
return checkValueInDb(client, key1, value1, done);
});
});
it('should not allow any op in a batch operation with notExists condition if key exists', done => {
const db = new ConditionalLevelDB();
const { client } = db;
const transaction = new IndexTransaction(db);
function tryPushAgain(err) {
if (err) {
return done(err);
}
transaction.addCondition({ notExists: key1 });
transaction.push({
type: 'put',
key: key1,
value: value1,
});
transaction.push({
type: 'put',
key: key2,
value: value2,
});
transaction.push({
type: 'put',
key: key3,
value: value3,
});
return transaction.commit(err => {
if (!err || !err.PreconditionFailed) {
return done(new Error('should not be able to conditional put for duplicate key'));
}
return async.parallel([
next => checkKeyNotExistsInDB(client, key2, next),
next => checkKeyNotExistsInDB(client, key3, next),
], err => {
assert.ifError(err);
return done();
});
});
}
client.batch()
.put(key1, value1)
.write(tryPushAgain);
});
it('should not allow batch operation with empty condition', done => {
const transaction = new IndexTransaction();
try {
transaction.addCondition({});
done(new Error('should fail for empty condition'));
} catch (err) {
assert.strictEqual(err.missingCondition, true);
done();
}
});
it('should not allow batch operation with unsupported condition', done => {
const transaction = new IndexTransaction();
try {
transaction.addCondition({ exists: key1 });
done(new Error('should fail for unsupported condition, currently supported - notExists'));
} catch (err) {
assert.strictEqual(err.unsupportedConditionalOperation, true);
done();
}
});
});

View File

@ -57,7 +57,6 @@ function zpad(key, length = 15) {
}
class DummyRequestLogger {
constructor() {
this.ops = [];
this.counts = {

View File

@ -69,7 +69,7 @@ describe('Check IP matches a list of CIDR ranges', () => {
[['192.168.1.1'], '192.168.1.1'],
].forEach(item =>
it(`should match IP ${item[0][0]} without CIDR range`,
() => cidrListMatchCheck(item[0], item[1], true))
() => cidrListMatchCheck(item[0], item[1], true)),
);
it('should not range match if CIDR range is not provided',

View File

@ -631,7 +631,7 @@ Object.keys(acl).forEach(
dummyBucket.getUid(), testUid);
});
});
})
}),
);
describe('uid default', () => {

View File

@ -187,7 +187,7 @@ const passTests = [
];
describe('NotificationConfiguration class getValidatedNotificationConfiguration',
() => {
() => {
it('should return MalformedXML error if request xml is empty', done => {
const errMessage = 'request xml is undefined or empty';
checkError('', 'MalformedXML', errMessage, done);
@ -211,4 +211,4 @@ describe('NotificationConfiguration class getValidatedNotificationConfiguration'
});
});
});
});
});

View File

@ -228,7 +228,7 @@ const passTestsGetConfigXML = [
];
describe('ObjectLockConfiguration class getValidatedObjectLockConfiguration',
() => {
() => {
it('should return MalformedXML error if request xml is empty', done => {
const errMessage = 'request xml is undefined or empty';
checkError('', 'MalformedXML', errMessage, done);
@ -252,7 +252,7 @@ describe('ObjectLockConfiguration class getValidatedObjectLockConfiguration',
});
});
});
});
});
describe('ObjectLockConfiguration class getConfigXML', () => {
passTestsGetConfigXML.forEach(test => {

View File

@ -47,7 +47,7 @@ describe('network.probe.Utils', () => {
{
errorType: 'MethodNotAllowed',
errorMessage: errors.MethodNotAllowed.description,
}
},
);
done();
}),
@ -64,7 +64,7 @@ describe('network.probe.Utils', () => {
{
errorType: 'MethodNotAllowed',
errorMessage: 'Very much not allowed',
}
},
);
done();
}),

View File

@ -317,9 +317,9 @@ describe('patch location constraints', () => {
patchLocations(
{ [locationName]: locations },
{ privateKey },
mockLog
mockLog,
),
{ [locationName]: expected }
{ [locationName]: expected },
);
});
});
@ -330,9 +330,9 @@ describe('patch location constraints', () => {
patchLocations(
undefined,
{ privateKey },
mockLog
mockLog,
),
{}
{},
);
});
@ -345,9 +345,9 @@ describe('patch location constriants', () => {
},
},
{ privateKey },
mockLog
mockLog,
),
{}
{},
);
});
});

View File

@ -87,7 +87,7 @@ const operations = [
},
];
describe('GcpService request behavior', function testSuite() {
describe('GcpService request behavior', () => {
jest.setTimeout(120000);
let httpServer;
let client;
@ -125,7 +125,7 @@ describe('GcpService request behavior', function testSuite() {
});
});
describe('GcpService pathStyle tests', function testSuite() {
describe('GcpService pathStyle tests', () => {
jest.setTimeout(120000);
let httpServer;
let client;
@ -159,7 +159,7 @@ describe('GcpService pathStyle tests', function testSuite() {
}));
});
describe('GcpService dnsStyle tests', function testSuite() {
describe('GcpService dnsStyle tests', () => {
jest.setTimeout(120000);
let httpServer;
let client;

View File

@ -82,7 +82,6 @@ const malformedLogEntry = new MockStream(malformedLogEntryData);
// mock a simple bucketclient to get a fake raft log
class BucketClientMock {
getRaftLog(raftId, start, limit, targetLeader, reqUids, callback) {
switch (raftId) {
case 0:

View File

@ -395,7 +395,7 @@ describe('MongoClientInterface::_processEntryData', () => {
tests.forEach(([msg, isTransient, params, expected]) => it(msg, () => {
assert.deepStrictEqual(
mongoTestClient._processEntryData(params, isTransient),
expected
expected,
);
}));
});
@ -498,7 +498,7 @@ describe('MongoClientInterface::_isReplicationEntryStalled', () => {
tests.forEach(([msg, params, expected]) => it(msg, () => {
assert.deepStrictEqual(
mongoTestClient._isReplicationEntryStalled(params, testDate),
expected
expected,
);
}));
});

View File

@ -32,7 +32,7 @@ describe('StringHash', () => {
done();
});
it(`Should distribute uniformly with a maximum of ${ERROR}% of deviation`,
function f(done) {
(done) => {
jest.setTimeout(20000);
const strings = new Array(STRING_COUNT).fill('')
.map(() => randomString(10));

View File

@ -1,4 +1,4 @@
const VID = require('../../../lib/versioning/VersionID.js');
const VID = require('../../../lib/versioning/VersionID');
const assert = require('assert');
function randkey(length) {

View File

@ -177,7 +177,7 @@ class LoopbackServerChannel extends EchoChannel {
serverExtensions.map(extension =>
this.KMIP.TextString(
extension.name,
extension.value)
extension.value),
)));
}
if (queryFunctions.includes('Query Extension Map')) {

View File

@ -50,7 +50,6 @@ class EchoChannel extends EventEmitter {
this.clogged = true;
return this;
}
}
class MirrorChannel extends EchoChannel {

yarn.lock (1103 changes)

File diff suppressed because it is too large.