Compare commits


10 Commits

Author SHA1 Message Date
Jordi Bertran de Balanda ac852727ba fix ft_test 2022-01-13 17:30:42 +01:00
Jordi Bertran de Balanda 151fde6a35 fix lint after eslint upgrade 2022-01-13 15:14:13 +01:00
Jordi Bertran de Balanda 2dc3ac6bb6 upgrade eslint for jest plugin 2022-01-13 15:14:13 +01:00
Jordi Bertran de Balanda 476da8ed62 more migrations 2022-01-12 18:35:33 +01:00
Jordi Bertran de Balanda 27e2e2393c migrate Extension 2022-01-12 18:35:33 +01:00
Jordi Bertran de Balanda ab06f6f7fb more migration 2022-01-12 18:35:33 +01:00
Jordi Bertran de Balanda 62d66e89ac migrate LRUCache 2022-01-12 18:35:33 +01:00
Jordi Bertran de Balanda 1fc0efeb78 deprecate leveldb support 2022-01-12 18:35:33 +01:00
Jordi Bertran de Balanda 8c2e95e31b initial batch of migrations 2022-01-12 18:35:33 +01:00
Jordi Bertran de Balanda e8bd68619d make index imports coherent 2022-01-12 18:35:33 +01:00
196 changed files with 4582 additions and 5011 deletions

@@ -1 +0,0 @@
-{ "extends": "scality" }

.eslintrc.json (new file)

@@ -0,0 +1,7 @@
+{
+    "extends": ["scality"],
+    "plugins": ["jest"],
+    "env": {
+        "jest/globals": true
+    }
+}
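
The new "jest/globals" environment lets test files use Jest's injected globals without importing them. A minimal sketch of a test that now lints cleanly (file name and assertion are illustrative):

    // test/example.spec.ts (hypothetical test file)
    describe('example', () => {
        it('can use Jest globals without imports', () => {
            expect(1 + 1).toBe(2); // placeholder assertion
        });
    });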

@@ -1,8 +1,8 @@
 module.exports = {
     presets: [
-        ['@babel/preset-env', {targets: {node: 'current'}}],
+        ['@babel/preset-env', { targets: { node: 'current' } }],
         '@babel/preset-typescript',
     ],
-    plugins: ['add-module-exports']
+    plugins: ['add-module-exports'],
 };

@@ -1,16 +1,15 @@
-module.exports = {
+export default {
     auth: require('./lib/auth/auth'),
     constants: require('./lib/constants'),
-    db: require('./lib/db'),
-    errors: require('./lib/errors.js'),
+    errors: require('./lib/errors'),
     errorUtils: require('./lib/errorUtils'),
     shuffle: require('./lib/shuffle'),
     stringHash: require('./lib/stringHash'),
     ipCheck: require('./lib/ipCheck'),
     jsutil: require('./lib/jsutil'),
     https: {
-        ciphers: require('./lib/https/ciphers.js'),
-        dhparam: require('./lib/https/dh2048.js'),
+        ciphers: require('./lib/https/ciphers'),
+        dhparam: require('./lib/https/dh2048'),
     },
     algorithms: {
         list: require('./lib/algos/list/exportAlgos'),
@@ -25,23 +24,23 @@ module.exports = {
         },
     },
     policies: {
-        evaluators: require('./lib/policyEvaluator/evaluator.js'),
+        evaluators: require('./lib/policyEvaluator/evaluator'),
         validateUserPolicy: require('./lib/policy/policyValidator')
            .validateUserPolicy,
         evaluatePrincipal: require('./lib/policyEvaluator/principal'),
-        RequestContext: require('./lib/policyEvaluator/RequestContext.js'),
+        RequestContext: require('./lib/policyEvaluator/RequestContext'),
         requestUtils: require('./lib/policyEvaluator/requestUtils'),
         actionMaps: require('./lib/policyEvaluator/utils/actionMaps'),
     },
     Clustering: require('./lib/Clustering'),
     testing: {
-        matrix: require('./lib/testing/matrix.js'),
+        matrix: require('./lib/testing/matrix'),
     },
     versioning: {
-        VersioningConstants: require('./lib/versioning/constants.js')
+        VersioningConstants: require('./lib/versioning/constants')
            .VersioningConstants,
-        Version: require('./lib/versioning/Version.js').Version,
-        VersionID: require('./lib/versioning/VersionID.js'),
+        Version: require('./lib/versioning/Version').Version,
+        VersionID: require('./lib/versioning/VersionID'),
     },
     network: {
         http: {
@@ -57,8 +56,8 @@ module.exports = {
         probe: {
             ProbeServer: require('./lib/network/probe/ProbeServer'),
             HealthProbeServer:
-                require('./lib/network/probe/HealthProbeServer.js'),
-            Utils: require('./lib/network/probe/Utils.js'),
+                require('./lib/network/probe/HealthProbeServer'),
+            Utils: require('./lib/network/probe/Utils'),
         },
         kmip: require('./lib/network/kmip'),
         kmipClient: require('./lib/network/kmip/Client'),
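
With the root module switched to a default export, consumers use a default import and pick the same nested namespaces off it. A sketch, assuming the package is published as "arsenal":

    import arsenal from 'arsenal'; // package name is an assumption

    const { Delimiter } = arsenal.algorithms.list;
    const { VersioningConstants } = arsenal.versioning;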

@@ -1,6 +1,7 @@
 'use strict'; // eslint-disable-line
-const cluster = require('cluster');
+import * as cluster from 'cluster';

 class Clustering {
     /**
@@ -12,20 +13,31 @@ class Clustering {
      * releasing ressources
      * @return {Clustering} itself
      */
-    constructor(size, logger, shutdownTimeout) {
-        this._size = size;
+    size: number;
+    shutdownTimeout: number;
+    logger: any; // TODO logger ???
+    shutdown: boolean;
+    workers: cluster.Worker[];
+    workersTimeout: NodeJS.Timeout[]; // TODO array of worker timeouts
+    workersStatus: number[];
+    status: number;
+    exitCb?: Function;
+    index?: number;
+
+    constructor(size: number, logger: any, shutdownTimeout=5000) {
         if (size < 1) {
             throw new Error('Cluster size must be greater than or equal to 1');
         }
-        this._shutdownTimeout = shutdownTimeout || 5000;
-        this._logger = logger;
-        this._shutdown = false;
-        this._workers = new Array(size).fill(undefined);
-        this._workersTimeout = new Array(size).fill(undefined);
-        this._workersStatus = new Array(size).fill(undefined);
-        this._status = 0;
-        this._exitCb = undefined; // Exit callback
-        this._index = undefined;
+        this.size = size;
+        this.shutdownTimeout = shutdownTimeout || 5000;
+        this.logger = logger;
+        this.shutdown = false;
+        this.workers = new Array(size).fill(undefined);
+        this.workersTimeout = new Array(size).fill(undefined);
+        this.workersStatus = new Array(size).fill(undefined);
+        this.status = 0;
+        this.exitCb = undefined; // Exit callback
+        this.index = undefined;
     }

     /**
@@ -34,23 +46,23 @@ class Clustering {
      * @private
      * @return {undefined}
      */
-    _afterStop() {
+    _afterStop(): undefined {
         // Asuming all workers shutdown gracefully
-        this._status = 0;
-        const size = this._size;
+        this.status = 0;
+        const size = this.size;
         for (let i = 0; i < size; ++i) {
             // If the process return an error code or killed by a signal,
             // set the status
-            if (typeof this._workersStatus[i] === 'number') {
-                this._status = this._workersStatus[i];
+            if (typeof this.workersStatus[i] === 'number') {
+                this.status = this.workersStatus[i];
                 break;
-            } else if (typeof this._workersStatus[i] === 'string') {
-                this._status = 1;
+            } else if (typeof this.workersStatus[i] === 'string') {
+                this.status = 1;
                 break;
             }
         }
-        if (this._exitCb) {
-            return this._exitCb(this);
+        if (this.exitCb) {
+            return this.exitCb(this);
         }
         return process.exit(this.getStatus());
     }
@@ -64,45 +76,47 @@ class Clustering {
      * @param {string} signal - Exit signal
      * @return {undefined}
      */
-    _workerExited(worker, i, code, signal) {
+    _workerExited(
+        worker: cluster.Worker, index: number, code: number, signal: number
+    ): undefined {
         // If the worker:
         // - was killed by a signal
         // - return an error code
         // - or just stopped
         if (signal) {
-            this._logger.info('Worker killed by signal', {
+            this.logger.info('Worker killed by signal', {
                 signal,
-                id: i,
+                id: index,
                 childPid: worker.process.pid,
             });
-            this._workersStatus[i] = signal;
+            this.workersStatus[index] = signal;
         } else if (code !== 0) {
-            this._logger.error('Worker exit with code', {
+            this.logger.error('Worker exit with code', {
                 code,
-                id: i,
+                id: index,
                 childPid: worker.process.pid,
             });
-            this._workersStatus[i] = code;
+            this.workersStatus[index] = code;
         } else {
-            this._logger.info('Worker shutdown gracefully', {
-                id: i,
+            this.logger.info('Worker shutdown gracefully', {
+                id: index,
                 childPid: worker.process.pid,
             });
-            this._workersStatus[i] = undefined;
+            this.workersStatus[index] = undefined;
         }
-        this._workers[i] = undefined;
-        if (this._workersTimeout[i]) {
-            clearTimeout(this._workersTimeout[i]);
-            this._workersTimeout[i] = undefined;
+        this.workers[index] = undefined;
+        if (this.workersTimeout[index]) {
+            clearTimeout(this.workersTimeout[index]);
+            this.workersTimeout[index] = undefined;
         }
         // If we don't trigger the stop method, the watchdog
         // will autorestart the worker
-        if (this._shutdown === false) {
-            return process.nextTick(() => this.startWorker(i));
+        if (this.shutdown === false) {
+            return process.nextTick(() => this.startWorker(index));
         }
         // Check if an worker is still running
-        if (!this._workers.every(cur => cur === undefined)) {
-            return undefined;
+        if (!this.workers.every(cur => cur === undefined)) {
+            return;
         }
         return this._afterStop();
     }
@@ -113,26 +127,26 @@ class Clustering {
      * @param {number} i Index of the starting worker
      * @return {undefined}
      */
-    startWorker(i) {
+    startWorker(index: number): undefined {
         if (!cluster.isMaster) {
             return;
         }
         // Fork a new worker
-        this._workers[i] = cluster.fork();
+        this.workers[index] = cluster.fork();
         // Listen for message from the worker
-        this._workers[i].on('message', msg => {
+        this.workers[index].on('message', msg => {
             // If the worker is ready, send him his id
             if (msg === 'ready') {
-                this._workers[i].send({ msg: 'setup', id: i });
+                this.workers[index].send({ msg: 'setup', id: index });
             }
         });
-        this._workers[i].on('exit', (code, signal) =>
-            this._workerExited(this._workers[i], i, code, signal));
+        this.workers[index].on('exit', (code, signal) =>
+            this._workerExited(this.workers[index], index, code, signal));
         // Trigger when the worker was started
-        this._workers[i].on('online', () => {
-            this._logger.info('Worker started', {
-                id: i,
-                childPid: this._workers[i].process.pid,
+        this.workers[index].on('online', () => {
+            this.logger.info('Worker started', {
+                id: index,
+                childPid: this.workers[index].process.pid,
             });
         });
     }
@@ -143,8 +157,8 @@ class Clustering {
      * @param {function} cb - Callback(Clustering, [exitSignal])
      * @return {Clustering} Itself
     */
-    onExit(cb) {
-        this._exitCb = cb;
+    onExit(cb: Function): Clustering {
+        this.exitCb = cb;
         return this;
     }
@@ -155,21 +169,21 @@ class Clustering {
      * @param {function} cb - Callback to run the worker
      * @return {Clustering} itself
      */
-    start(cb) {
+    start(cb: Function): Clustering {
         process.on('SIGINT', () => this.stop('SIGINT'));
         process.on('SIGHUP', () => this.stop('SIGHUP'));
         process.on('SIGQUIT', () => this.stop('SIGQUIT'));
         process.on('SIGTERM', () => this.stop('SIGTERM'));
         process.on('SIGPIPE', () => {});
         process.on('exit', (code, signal) => {
-            if (this._exitCb) {
-                this._status = code || 0;
-                return this._exitCb(this, signal);
+            if (this.exitCb) {
+                this.status = code || 0;
+                return this.exitCb(this, signal);
             }
             return process.exit(code || 0);
         });
         process.on('uncaughtException', err => {
-            this._logger.fatal('caught error', {
+            this.logger.fatal('caught error', {
                 error: err.message,
                 stack: err.stack.split('\n').map(str => str.trim()),
             });
@@ -180,7 +194,7 @@ class Clustering {
             // know the id of the slave cluster
             process.on('message', msg => {
                 if (msg.msg === 'setup') {
-                    this._index = msg.id;
+                    this.index = msg.id;
                     cb(this);
                 }
             });
@@ -188,7 +202,7 @@ class Clustering {
             // the worker has started
             process.send('ready');
         } else {
-            for (let i = 0; i < this._size; ++i) {
+            for (let i = 0; i < this.size; ++i) {
                 this.startWorker(i);
             }
         }
@@ -200,8 +214,8 @@ class Clustering {
      *
      * @return {Cluster.Worker[]} Workers
      */
-    getWorkers() {
-        return this._workers;
+    getWorkers(): cluster.Worker[] {
+        return this.workers;
     }

     /**
@@ -209,8 +223,8 @@ class Clustering {
      *
      * @return {number} Status code
      */
-    getStatus() {
-        return this._status;
+    getStatus(): number {
+        return this.status;
     }

     /**
@@ -218,8 +232,8 @@ class Clustering {
      *
      * @return {boolean} - True if master, false otherwise
      */
-    isMaster() {
-        return this._index === undefined;
+    isMaster(): boolean {
+        return this.index === undefined;
     }

     /**
@@ -227,8 +241,8 @@ class Clustering {
      *
      * @return {number|undefined} Worker index, undefined if it's master
      */
-    getIndex() {
-        return this._index;
+    getIndex(): number {
+        return this.index;
     }

     /**
@@ -237,22 +251,22 @@ class Clustering {
      * @param {string} signal - Set internally when processes killed by signal
      * @return {undefined}
      */
-    stop(signal) {
+    stop(signal: string): undefined {
         if (!cluster.isMaster) {
-            if (this._exitCb) {
-                return this._exitCb(this, signal);
+            if (this.exitCb) {
+                return this.exitCb(this, signal);
             }
             return process.exit(0);
         }
-        this._shutdown = true;
-        return this._workers.forEach((worker, i) => {
+        this.shutdown = true;
+        return this.workers.forEach((worker, index) => {
             if (!worker) {
                 return undefined;
             }
-            this._workersTimeout[i] = setTimeout(() => {
+            this.workersTimeout[index] = setTimeout(() => {
                 // Kill the worker if the sigterm was ignored or take too long
                 process.kill(worker.process.pid, 'SIGKILL');
-            }, this._shutdownTimeout);
+            }, this.shutdownTimeout);
             // Send sigterm to the process, allowing to release ressources
             // and save some states
             return process.kill(worker.process.pid, 'SIGTERM');
@@ -260,4 +274,4 @@ class Clustering {
     }
 }

-module.exports = Clustering;
+export default Clustering;
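
For reference, a minimal sketch of driving the migrated class; the logger is a stand-in (any object with info/error/fatal methods would do) and the worker body is illustrative:

    import Clustering from './Clustering'; // module path assumed

    const logger: any = console; // stand-in for the expected logger

    new Clustering(4, logger)
        .onExit(clustering => process.exit(clustering.getStatus()))
        .start(clustering => {
            // this callback runs once in each of the 4 workers
            logger.info('worker ready', { index: clustering.getIndex() });
        });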

@@ -1,4 +1,4 @@
-const assert = require('assert');
+import { strict as assert } from 'assert';

 /**
  * @class
@@ -12,9 +12,15 @@ class LRUCache {
      * @param {number} maxEntries - maximum number of entries kept in
      * the cache
      */
-    constructor(maxEntries) {
+    maxEntries: number;
+    private entryCount: number;
+    private entryMap: object;
+    private lruHead: any; // TODO lruTrail?
+    private lruTail: any; // TODO lruTrail?
+
+    constructor(maxEntries: number) {
         assert(maxEntries >= 1);
-        this._maxEntries = maxEntries;
+        this.maxEntries = maxEntries;
         this.clear();
     }
@@ -27,8 +33,8 @@ class LRUCache {
      * @return {boolean} true if the cache contained an entry with
      * this key, false if it did not
      */
-    add(key, value) {
-        let entry = this._entryMap[key];
+    add(key: string, value: object): boolean {
+        let entry = this.entryMap[key];
         if (entry) {
             entry.value = value;
             // make the entry the most recently used by re-pushing it
@@ -37,15 +43,15 @@ class LRUCache {
             this._lruPushEntry(entry);
             return true;
         }
-        if (this._entryCount === this._maxEntries) {
+        if (this.entryCount === this.maxEntries) {
             // if the cache is already full, abide by the LRU strategy
             // and remove the least recently used entry from the cache
             // before pushing the new entry
-            this._removeEntry(this._lruTail);
+            this._removeEntry(this.lruTail);
         }
         entry = { key, value };
-        this._entryMap[key] = entry;
-        this._entryCount += 1;
+        this.entryMap[key] = entry;
+        this.entryCount += 1;
         this._lruPushEntry(entry);
         return false;
     }
@@ -59,8 +65,8 @@ class LRUCache {
      * exists in the cache, or undefined if not found - either if the
      * key was never added or if it has been evicted from the cache.
      */
-    get(key) {
-        const entry = this._entryMap[key];
+    get(key: string): object | undefined {
+        const entry = this.entryMap[key];
         if (entry) {
             // make the entry the most recently used by re-pushing it
             // to the head of the LRU list
@@ -79,8 +85,8 @@ class LRUCache {
      * there was no entry with this key in the cache - either if the
      * key was never added or if it has been evicted from the cache.
      */
-    remove(key) {
-        const entry = this._entryMap[key];
+    remove(key: string): boolean {
+        const entry = this.entryMap[key];
         if (entry) {
             this._removeEntry(entry);
             return true;
@@ -93,8 +99,8 @@ class LRUCache {
      *
      * @return {number} current number of cached entries
      */
-    count() {
-        return this._entryCount;
+    count(): number {
+        return this.entryCount;
     }

     /**
@@ -102,11 +108,11 @@ class LRUCache {
      *
      * @return {undefined}
      */
-    clear() {
-        this._entryMap = {};
-        this._entryCount = 0;
-        this._lruHead = null;
-        this._lruTail = null;
+    clear(): undefined {
+        this.entryMap = {};
+        this.entryCount = 0;
+        this.lruHead = null;
+        this.lruTail = null;
     }

     /**
@@ -116,16 +122,16 @@ class LRUCache {
      * @param {object} entry - entry to push
      * @return {undefined}
      */
-    _lruPushEntry(entry) {
+    _lruPushEntry(entry: object): undefined {
         /* eslint-disable no-param-reassign */
-        entry._lruNext = this._lruHead;
+        entry._lruNext = this.lruHead;
         entry._lruPrev = null;
-        if (this._lruHead) {
-            this._lruHead._lruPrev = entry;
+        if (this.lruHead) {
+            this.lruHead._lruPrev = entry;
         }
-        this._lruHead = entry;
-        if (!this._lruTail) {
-            this._lruTail = entry;
+        this.lruHead = entry;
+        if (!this.lruTail) {
+            this.lruTail = entry;
         }
         /* eslint-enable no-param-reassign */
     }
@@ -136,17 +142,17 @@ class LRUCache {
      * @param {object} entry - entry to remove
      * @return {undefined}
      */
-    _lruRemoveEntry(entry) {
+    _lruRemoveEntry(entry): undefined {
         /* eslint-disable no-param-reassign */
         if (entry._lruPrev) {
             entry._lruPrev._lruNext = entry._lruNext;
         } else {
-            this._lruHead = entry._lruNext;
+            this.lruHead = entry._lruNext;
         }
         if (entry._lruNext) {
             entry._lruNext._lruPrev = entry._lruPrev;
         } else {
-            this._lruTail = entry._lruPrev;
+            this.lruTail = entry._lruPrev;
         }
         /* eslint-enable no-param-reassign */
     }
@@ -157,11 +163,11 @@ class LRUCache {
      * @param {object} entry - cache entry to remove
      * @return {undefined}
      */
-    _removeEntry(entry) {
+    _removeEntry(entry: object): undefined {
         this._lruRemoveEntry(entry);
-        delete this._entryMap[entry.key];
-        this._entryCount -= 1;
+        delete this.entryMap[entry.key];
+        this.entryCount -= 1;
     }
 }

-module.exports = LRUCache;
+export default LRUCache;
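
A quick usage sketch of the migrated cache, showing the add/get/evict contract implied by the signatures above:

    import LRUCache from './LRUCache'; // module path assumed

    const cache = new LRUCache(2); // keep at most 2 entries
    cache.add('a', { n: 1 });      // false: key was not already present
    cache.add('b', { n: 2 });
    cache.get('a');                // promotes 'a' to most recently used
    cache.add('c', { n: 3 });      // evicts 'b', the least recently used
    cache.get('b');                // undefined: 'b' was evicted
    cache.count();                 // 2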

@@ -1,6 +1,6 @@
 'use strict'; // eslint-disable-line strict

-const { FILTER_SKIP, SKIP_NONE } = require('./tools');
+import { FILTER_SKIP, SKIP_NONE } from './tools';

 // Use a heuristic to amortize the cost of JSON
 // serialization/deserialization only on largest metadata where the
@@ -22,7 +22,7 @@ const TRIM_METADATA_MIN_BLOB_SIZE = 10000;
 /**
  * Base class of listing extensions.
  */
-class Extension {
+export class Extension {
     /**
      * This takes a list of parameters and a logger as the inputs.
      * Derivatives should have their own format regarding parameters.
@@ -31,7 +31,13 @@ class Extension {
      * @param {RequestLogger} logger - the logger
      * @constructor
      */
-    constructor(parameters, logger) {
+    parameters: any;
+    logger: any;
+    res: any;
+    keys: number;
+
+    constructor(parameters: any, logger: any) {
         // inputs
         this.parameters = parameters;
         this.logger = logger;
@@ -51,7 +57,7 @@ class Extension {
      * heavy unused fields, or left untouched (depending on size
      * heuristics)
      */
-    trimMetadata(value) {
+    trimMetadata(value: string): string {
         let ret = undefined;
         if (value.length >= TRIM_METADATA_MIN_BLOB_SIZE) {
             try {
@@ -81,7 +87,7 @@ class Extension {
      *
      * @return {object} - listing parameters for metadata
      */
-    genMDParams() {
+    genMDParams(): object {
         return {};
     }
@@ -96,7 +102,7 @@ class Extension {
      *          = 0: entry is accepted but not included (skipping)
      *          < 0: entry is not accepted, listing should finish
      */
-    filter(entry) {
+    filter(entry: any): number {
         return entry ? FILTER_SKIP : FILTER_SKIP;
     }
@@ -108,7 +114,7 @@ class Extension {
      * @return {string} - the insight: a common prefix or a master key,
      * or SKIP_NONE if there is no insight
      */
-    skipping() {
+    skipping(): string {
         return SKIP_NONE;
     }
@@ -116,9 +122,7 @@ class Extension {
      * Get the listing resutls. Format depends on derivatives' specific logic.
      * @return {Array} - The listed elements
      */
-    result() {
+    result(): any {
         return this.res;
     }
 }
-
-module.exports.default = Extension;
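
Since Extension is now a named export, derivatives import it directly instead of going through .default. A minimal subclass sketch (the accept-everything filter is illustrative, and the res initialization mirrors what the basic List extension does):

    import { Extension } from './Extension';
    import { FILTER_ACCEPT } from './tools';

    class AcceptAll extends Extension { // hypothetical derivative
        constructor(parameters: any, logger: any) {
            super(parameters, logger);
            this.res = [];
        }
        filter(entry: any): number {
            this.res.push(entry);
            return FILTER_ACCEPT; // > 0: accepted and included
        }
    }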

@@ -1,16 +1,37 @@
 'use strict'; // eslint-disable-line strict

-const { inc, checkLimit, listingParamsMasterKeysV0ToV1,
-    FILTER_END, FILTER_ACCEPT } = require('./tools');
+import { inc, checkLimit, listingParamsMasterKeysV0ToV1,
+    FILTER_END, FILTER_ACCEPT } from './tools';
+import { VersioningConstants as VSConst } from '../../versioning/constants';

 const DEFAULT_MAX_KEYS = 1000;
-const VSConst = require('../../versioning/constants').VersioningConstants;
 const { DbPrefixes, BucketVersioningKeyFormat } = VSConst;

-function numberDefault(num, defaultNum) {
+function numberDefault(num: string, defaultNum: number): number {
     const parsedNum = Number.parseInt(num, 10);
     return Number.isNaN(parsedNum) ? defaultNum : parsedNum;
 }

+interface MPUParams {
+    delimiter: any;
+    splitter: any;
+    prefix: any; // TODO type
+    uploadIdMarker: any; // TODO type
+    maxKeys: string;
+    queryPrefixLength: string;
+    keyMarker?: any; // TODO type
+}
+
+interface V0Params {
+    gt?: string;
+    gte?: string;
+    lt?: string;
+    lte?: string;
+}
+
 /**
  * Class for the MultipartUploads extension
 */
@@ -23,7 +44,22 @@ class MultipartUploads {
      * @param {String} [vFormat] - versioning key format
      * @return {undefined}
      */
-    constructor(params, logger, vFormat) {
+    params: MPUParams; // TODO param type
+    vFormat: string; // TODO vFormat type
+    CommonPrefixes: any[]; // TODO commonPrefixes type
+    Uploads: any[]; // TODO type
+    IsTruncated: boolean;
+    NextKeyMarker: string;
+    NextUploadIdMarker: string;
+    prefixLength: number;
+    queryPrefixLength: number;
+    keys: number;
+    maxKeys: number;
+    delimiter: any; // TODO type
+    splitter: any; // TODO type
+    logger: any // TODO type
+
+    constructor(params: MPUParams, logger: any, vFormat: string) {
         this.params = params;
         this.vFormat = vFormat || BucketVersioningKeyFormat.v0;
         this.CommonPrefixes = [];
@@ -51,8 +87,8 @@ class MultipartUploads {
         }[this.vFormat]);
     }

-    genMDParamsV0() {
-        const params = {};
+    genMDParamsV0(): V0Params {
+        const params: V0Params = {};
         if (this.params.keyMarker) {
             params.gt = `overview${this.params.splitter}` +
                 `${this.params.keyMarker}${this.params.splitter}`;
@@ -74,6 +110,7 @@ class MultipartUploads {
     }

     genMDParamsV1() {
+        // TODO v1 params definition
         const v0params = this.genMDParamsV0();
         return listingParamsMasterKeysV0ToV1(v0params);
     }
@@ -85,7 +122,7 @@ class MultipartUploads {
      * @param {String} value - The value of the key
      * @return {undefined}
      */
-    addUpload(value) {
+    addUpload(value: string): undefined {
         const tmp = JSON.parse(value);
         this.Uploads.push({
             key: tmp.key,
@@ -114,7 +151,7 @@ class MultipartUploads {
      * @param {String} commonPrefix - The commonPrefix to add
      * @return {undefined}
      */
-    addCommonPrefix(commonPrefix) {
+    addCommonPrefix(commonPrefix: string): undefined {
         if (this.CommonPrefixes.indexOf(commonPrefix) === -1) {
             this.CommonPrefixes.push(commonPrefix);
             this.NextKeyMarker = commonPrefix;
@@ -122,11 +159,11 @@ class MultipartUploads {
         }
     }

-    getObjectKeyV0(obj) {
+    getObjectKeyV0(obj: any) { // TODO this is an Upload value
         return obj.key;
     }

-    getObjectKeyV1(obj) {
+    getObjectKeyV1(obj: any) { // TODO this is an Upload value
         return obj.key.slice(DbPrefixes.Master.length);
     }
@@ -135,14 +172,14 @@ class MultipartUploads {
      * @param {String} obj - The key and value of the element
      * @return {number} - > 0: Continue, < 0: Stop
     */
-    filter(obj) {
+    filter(obj: any): number {
         // Check first in case of maxkeys = 0
         if (this.keys >= this.maxKeys) {
             // In cases of maxKeys <= 0 => IsTruncated = false
             this.IsTruncated = this.maxKeys > 0;
             return FILTER_END;
         }
-        const key = this.getObjectKey(obj);
+        const key = this.getObjectKey(obj); // TODO this is actually valid - see ctor
         const value = obj.value;
         if (this.delimiter) {
             const mpuPrefixSlice = `overview${this.splitter}`.length;
@@ -162,7 +199,7 @@ class MultipartUploads {
         return FILTER_ACCEPT;
     }

-    skipping() {
+    skipping(): string {
         return '';
     }
@@ -170,7 +207,7 @@ class MultipartUploads {
      * Returns the formatted result
      * @return {Object} - The result.
      */
-    result() {
+    result(): object {
         return {
             CommonPrefixes: this.CommonPrefixes,
             Uploads: this.Uploads,
@@ -183,6 +220,7 @@ class MultipartUploads {
     }
 }

-module.exports = {
+export {
     MultipartUploads,
-};
+    MPUParams
+}

@@ -1,14 +1,27 @@
 'use strict'; // eslint-disable-line strict

-const Extension = require('./Extension').default;
-const { checkLimit, FILTER_END, FILTER_ACCEPT, FILTER_SKIP } = require('./tools');
+import { Extension } from './Extension';
+import { checkLimit, FILTER_END, FILTER_ACCEPT, FILTER_SKIP } from './tools';

 const DEFAULT_MAX_KEYS = 10000;

+interface ListParams {
+    maxKeys: number;
+    filterKey: any; // TODO type
+    filterKeyStartsWith: any; // TODO type
+}
+
 /**
  * Class of an extension doing the simple listing
 */
 class List extends Extension {
+    maxKeys: number;
+    filterKey: any;
+    filterKeyStartsWith: any;
+
     /**
      * Constructor
      * Set the logger and the res
@@ -16,7 +29,7 @@ class List extends Extension {
      * @param {RequestLogger} logger - The logger of the request
      * @return {undefined}
      */
-    constructor(parameters, logger) {
+    constructor(parameters: ListParams, logger: any) {
         super(parameters, logger);
         this.res = [];
         if (parameters) {
@@ -29,7 +42,7 @@ class List extends Extension {
         this.keys = 0;
     }

-    genMDParams() {
+    genMDParams(): object {
         const params = this.parameters ? {
             gt: this.parameters.gt,
             gte: this.parameters.gte || this.parameters.start,
@@ -53,7 +66,7 @@ class List extends Extension {
      *
      * @return {Boolean} Returns true if matches, else false.
      */
-    customFilter(value) {
+    customFilter(value: string): boolean {
         let _value;
         try {
             _value = JSON.parse(value);
@@ -90,7 +103,7 @@ class List extends Extension {
      * @return {number} - > 0 : continue listing
      *                    < 0 : listing done
      */
-    filter(elem) {
+    filter(elem: object): number {
         // Check first in case of maxkeys <= 0
         if (this.keys >= this.maxKeys) {
             return FILTER_END;
@@ -117,7 +130,7 @@ class List extends Extension {
      * Function returning the result
      * @return {Array} - The listed elements
      */
-    result() {
+    result(): any[] {
         return this.res;
     }
 }

@@ -1,9 +1,9 @@
 'use strict'; // eslint-disable-line strict

-const Extension = require('./Extension').default;
-const { inc, listingParamsMasterKeysV0ToV1,
-    FILTER_END, FILTER_ACCEPT, FILTER_SKIP } = require('./tools');
-const VSConst = require('../../versioning/constants').VersioningConstants;
+import { Extension } from './Extension';
+import { inc, listingParamsMasterKeysV0ToV1,
+    FILTER_END, FILTER_ACCEPT, FILTER_SKIP } from './tools';
+import { VersioningConstants as VSConst } from '../../versioning/constants';
 const { DbPrefixes, BucketVersioningKeyFormat } = VSConst;

 /**
@@ -14,10 +14,41 @@ const { DbPrefixes, BucketVersioningKeyFormat } = VSConst;
  * @param {Number} delimiterIndex - 'folder' index in the path
  * @return {String} - CommonPrefix
 */
-function getCommonPrefix(key, delimiter, delimiterIndex) {
+function getCommonPrefix(
+    key: string, delimiter: string, delimiterIndex: number
+): string {
     return key.substring(0, delimiterIndex + delimiter.length);
 }

+interface DelimiterParams {
+    delimiter: string;
+    prefix: string;
+    marker: string;
+    maxKeys: number;
+    v2: boolean;
+    startAfter: string;
+    continuationToken: string;
+    alphabeticalOrder: boolean;
+}
+
+interface DelimiterContentItem {
+    key: string;
+    value: string;
+}
+
+interface DelimiterResult {
+    CommonPrefixes: string[];
+    Contents: DelimiterContentItem[]; // TODO type this.Contents,
+    IsTruncated: boolean;
+    Delimiter: string;
+    NextMarker?: any; // TODO type
+    NextContinuationToken?: any; // TODO type
+}
+
 /**
  * Handle object listing with parameters
  *
@@ -55,7 +86,25 @@ class Delimiter extends Extension {
      *                                           request
      * @param {String} [vFormat] - versioning key format
      */
-    constructor(parameters, logger, vFormat) {
+    delimiter: string;
+    prefix: string;
+    marker: string;
+    maxKeys: number;
+    startAfter: string;
+    continuationToken: string;
+    alphabeticalOrder: boolean;
+    vFormat: string;
+    CommonPrefixes: string[];
+    Contents: DelimiterContentItem[];
+    IsTruncated: boolean;
+    NextMarker: string;
+    NextContinuationToken: string;
+    startMarker: string;
+    continueMarker: string;
+    nextContinueMarker: string;
+
+    constructor(parameters: DelimiterParams, logger: any, vFormat: string) {
         super(parameters, logger);
         // original listing parameters
         this.delimiter = parameters.delimiter;
@@ -134,7 +183,7 @@ class Delimiter extends Extension {
      *          final state of the result if it is the case
      * @return {Boolean} - indicates if the iteration has to stop
     */
-    _reachedMaxKeys() {
+    _reachedMaxKeys(): boolean {
         if (this.keys >= this.maxKeys) {
             // In cases of maxKeys <= 0 -> IsTruncated = false
             this.IsTruncated = this.maxKeys > 0;
@@ -151,7 +200,7 @@ class Delimiter extends Extension {
      * @param {String} value - The value of the key
      * @return {number} - indicates if iteration should continue
      */
-    addContents(key, value) {
+    addContents(key: string, value: string): number {
         if (this._reachedMaxKeys()) {
             return FILTER_END;
         }
@@ -180,7 +229,7 @@ class Delimiter extends Extension {
      * @param {String} obj.value - The value of the element
      * @return {number} - indicates if iteration should continue
      */
-    filter(obj) {
+    filter(obj): number {
         const key = this.getObjectKey(obj);
         const value = obj.value;
         if ((this.prefix && !key.startsWith(this.prefix))
@@ -206,7 +255,7 @@ class Delimiter extends Extension {
      * @param {Number} index - after prefix starting point
      * @return {Boolean} - indicates if iteration should continue
      */
-    addCommonPrefix(key, index) {
+    addCommonPrefix(key: string, index: number) {
         const commonPrefix = getCommonPrefix(key, this.delimiter, index);
         if (this.CommonPrefixes.indexOf(commonPrefix) === -1
                 && this[this.nextContinueMarker] !== commonPrefix) {
@@ -228,7 +277,7 @@ class Delimiter extends Extension {
      * @return {string} - the present range (NextMarker) if repd believes
      *                    that it's enough and should move on
      */
-    skippingV0() {
+    skippingV0(): string {
         return this[this.nextContinueMarker];
     }
@@ -239,7 +288,7 @@ class Delimiter extends Extension {
      * @return {string} - the present range (NextMarker) if repd believes
      *                    that it's enough and should move on
      */
-    skippingV1() {
+    skippingV1(): string {
         return DbPrefixes.Master + this[this.nextContinueMarker];
     }
@@ -249,12 +298,12 @@ class Delimiter extends Extension {
      *                  isn't truncated
      * @return {Object} - following amazon format
      */
-    result() {
+    result(): DelimiterResult {
         /* NextMarker is only provided when delimiter is used.
         * specified in v1 listing documentation
         * http://docs.aws.amazon.com/AmazonS3/latest/API/RESTBucketGET.html
         */
-        const result = {
+        const result: DelimiterResult = {
             CommonPrefixes: this.CommonPrefixes,
             Contents: this.Contents,
             IsTruncated: this.IsTruncated,
@@ -271,4 +320,4 @@ class Delimiter extends Extension {
     }
 }

-module.exports = { Delimiter };
+export { Delimiter, DelimiterParams };
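
As a concrete illustration of the delimiter logic: two keys sharing a 'folder' collapse into one CommonPrefix. A hedged sketch (parameter values and logger are illustrative, and the unchanged filter body is assumed to behave as in the original JavaScript):

    import { Delimiter } from './delimiter';

    const lister = new Delimiter({
        delimiter: '/', prefix: '', marker: '', maxKeys: 1000,
        v2: false, startAfter: '', continuationToken: '',
        alphabeticalOrder: true,
    }, console as any, 'v0');
    lister.filter({ key: 'photos/2021/a.jpg', value: '{}' });
    lister.filter({ key: 'photos/2021/b.jpg', value: '{}' });
    // lister.result().CommonPrefixes === ['photos/']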

@@ -1,10 +1,11 @@
 'use strict'; // eslint-disable-line strict

-const Delimiter = require('./delimiter').Delimiter;
-const Version = require('../../versioning/Version').Version;
-const VSConst = require('../../versioning/constants').VersioningConstants;
+import { Delimiter } from './delimiter';
+import type { DelimiterParams } from './delimiter';
+import { Version } from '../../versioning/Version';
+import { VersioningConstants as VSConst } from '../../versioning/constants';
 const { BucketVersioningKeyFormat } = VSConst;
-const { FILTER_ACCEPT, FILTER_SKIP, SKIP_NONE } = require('./tools');
+import { FILTER_ACCEPT, FILTER_SKIP, SKIP_NONE } from './tools';

 const VID_SEP = VSConst.VersionId.Separator;
 const { DbPrefixes } = VSConst;
@@ -27,7 +28,11 @@ class DelimiterMaster extends Delimiter {
      * @param {RequestLogger} logger - The logger of the request
      * @param {String} [vFormat] - versioning key format
      */
-    constructor(parameters, logger, vFormat) {
+    prvKey?: any; // TODO type
+    prvPHDKey?: any; // TODO type
+    inReplayPrefix?: any; // TODO type
+
+    constructor(parameters: DelimiterParams, logger: any, vFormat: string) {
         super(parameters, logger, vFormat);
         // non-PHD master version or a version whose master is a PHD version
         this.prvKey = undefined;
@@ -58,7 +63,7 @@ class DelimiterMaster extends Delimiter {
      * @param {String} obj.value - The value of the element
      * @return {number} - indicates if iteration should continue
      */
-    filterV0(obj) {
+    filterV0(obj: object): number {
         let key = obj.key;
         const value = obj.value;
@@ -155,14 +160,14 @@ class DelimiterMaster extends Delimiter {
      * @param {String} obj.value - The value of the element
      * @return {number} - indicates if iteration should continue
      */
-    filterV1(obj) {
+    filterV1(obj: object): number {
         // Filtering master keys in v1 is simply listing the master
         // keys, as the state of version keys do not change the
         // result, so we can use Delimiter method directly.
         return super.filter(obj);
     }

-    skippingBase() {
+    skippingBase(): string {
         if (this[this.nextContinueMarker]) {
             // next marker or next continuation token:
             // - foo/ : skipping foo/
@@ -177,14 +182,14 @@ class DelimiterMaster extends Delimiter {
         return SKIP_NONE;
     }

-    skippingV0() {
+    skippingV0(): string {
         if (this.inReplayPrefix) {
             return DbPrefixes.Replay;
         }
         return this.skippingBase();
     }

-    skippingV1() {
+    skippingV1(): string {
         const skipTo = this.skippingBase();
         if (skipTo === SKIP_NONE) {
             return SKIP_NONE;
@@ -193,4 +198,4 @@ class DelimiterMaster extends Delimiter {
     }
 }

-module.exports = { DelimiterMaster };
+export { DelimiterMaster };

@@ -1,14 +1,32 @@
 'use strict'; // eslint-disable-line strict

-const Delimiter = require('./delimiter').Delimiter;
-const Version = require('../../versioning/Version').Version;
-const VSConst = require('../../versioning/constants').VersioningConstants;
-const { inc, FILTER_END, FILTER_ACCEPT, FILTER_SKIP, SKIP_NONE } =
-    require('./tools');
+import { Delimiter } from './delimiter';
+import type { DelimiterParams } from './delimiter';
+import type { MPUParams } from './MPU';
+import { Version } from '../../versioning/Version';
+import { VersioningConstants as VSConst } from '../../versioning/constants';
+import { inc, FILTER_END, FILTER_ACCEPT, FILTER_SKIP, SKIP_NONE } from './tools';

 const VID_SEP = VSConst.VersionId.Separator;
 const { DbPrefixes, BucketVersioningKeyFormat } = VSConst;

+interface DelimiterVersionsParams extends DelimiterParams {
+    keyMarker: string; // TODO type
+    versionIdMarker: any; // TODO type
+}
+
+interface DelimiterVersionsResult {
+    CommonPrefixes: string[];
+    Versions: any; // TODO type
+    IsTruncated: boolean,
+    NextKeyMarker?: any; // TODO type
+    NextVersionIdMarker?: any; // TODO type
+    Delimiter: string;
+}
+
 /**
  * Handle object listing with parameters
  *
@@ -22,7 +40,15 @@ const { DbPrefixes, BucketVersioningKeyFormat } = VSConst;
  * @prop {Number} maxKeys - number of keys to list
 */
 class DelimiterVersions extends Delimiter {
-    constructor(parameters, logger, vFormat) {
+    keyMarker: string;
+    versionIdMarker: any;
+    masterKey?: any; // TODO type
+    masterVersionId?: any; // TODO type
+    NextVersionIdMarker: any; // TODO type
+    inReplayPrefix: boolean;
+
+    constructor(parameters: DelimiterVersionsParams, logger: any, vFormat: string) {
         super(parameters, logger, vFormat);
         // specific to version listing
         this.keyMarker = parameters.keyMarker;
@@ -49,7 +75,7 @@ class DelimiterVersions extends Delimiter {
         }[this.vFormat]);
     }

-    genMDParamsV0() {
+    genMDParamsV0(): MPUParams {
         const params = {};
         if (this.parameters.prefix) {
             params.gte = this.parameters.prefix;
@@ -73,40 +99,41 @@ class DelimiterVersions extends Delimiter {
         return params;
     }

-    genMDParamsV1() {
+    genMDParamsV1(): MPUParams[] {
         // return an array of two listing params sets to ask for
         // synchronized listing of M and V ranges
-        const params = [{}, {}];
+        const mRangeParams: MPUParams = {};
+        const vRangeParams: MPUParams = {};
         if (this.parameters.prefix) {
-            params[0].gte = DbPrefixes.Master + this.parameters.prefix;
-            params[0].lt = DbPrefixes.Master + inc(this.parameters.prefix);
-            params[1].gte = DbPrefixes.Version + this.parameters.prefix;
-            params[1].lt = DbPrefixes.Version + inc(this.parameters.prefix);
+            mRangeParams.gte = DbPrefixes.Master + this.parameters.prefix;
+            mRangeParams.lt = DbPrefixes.Master + inc(this.parameters.prefix);
+            vRangeParams.gte = DbPrefixes.Version + this.parameters.prefix;
+            vRangeParams.lt = DbPrefixes.Version + inc(this.parameters.prefix);
         } else {
-            params[0].gte = DbPrefixes.Master;
-            params[0].lt = inc(DbPrefixes.Master); // stop after the last master key
-            params[1].gte = DbPrefixes.Version;
-            params[1].lt = inc(DbPrefixes.Version); // stop after the last version key
+            mRangeParams.gte = DbPrefixes.Master;
+            mRangeParams.lt = inc(DbPrefixes.Master); // stop after the last master key
+            vRangeParams.gte = DbPrefixes.Version;
+            vRangeParams.lt = inc(DbPrefixes.Version); // stop after the last version key
         }
         if (this.parameters.keyMarker) {
-            if (params[1].gte <= DbPrefixes.Version + this.parameters.keyMarker) {
-                delete params[0].gte;
-                delete params[1].gte;
-                params[0].gt = DbPrefixes.Master + inc(this.parameters.keyMarker + VID_SEP);
+            if (vRangeParams.gte <= DbPrefixes.Version + this.parameters.keyMarker) {
+                delete mRangeParams.gte;
+                delete vRangeParams.gte;
+                mRangeParams.gt = DbPrefixes.Master + inc(this.parameters.keyMarker + VID_SEP);
                 if (this.parameters.versionIdMarker) {
                     // versionIdMarker should always come with keyMarker
                     // but may not be the other way around
-                    params[1].gt = DbPrefixes.Version
+                    vRangeParams.gt = DbPrefixes.Version
                         + this.parameters.keyMarker
                         + VID_SEP
                         + this.parameters.versionIdMarker;
                 } else {
-                    params[1].gt = DbPrefixes.Version
+                    vRangeParams.gt = DbPrefixes.Version
                         + inc(this.parameters.keyMarker + VID_SEP);
                 }
             }
         }
-        return params;
+        return [mRangeParams, vRangeParams];
     }

 /**
@@ -120,7 +147,7 @@ class DelimiterVersions extends Delimiter {
      *         * -1 if master key < version key
      *         * 1 if master key > version key
      */
-    compareObjects(masterObj, versionObj) {
+    compareObjects(masterObj: object, versionObj: object): number {
         const masterKey = masterObj.key.slice(DbPrefixes.Master.length);
         const versionKey = versionObj.key.slice(DbPrefixes.Version.length);
         return masterKey < versionKey ? -1 : 1;
@@ -136,7 +163,7 @@ class DelimiterVersions extends Delimiter {
      * @param {String} obj.value - The value of the key
      * @return {Boolean} - indicates if iteration should continue
      */
-    addContents(obj) {
+    addContents(obj: object): boolean {
         if (this._reachedMaxKeys()) {
             return FILTER_END;
         }
@@ -163,7 +190,7 @@ class DelimiterVersions extends Delimiter {
      * @param {String} obj.value - The value of the element
      * @return {number} - indicates if iteration should continue
      */
-    filterV0(obj) {
+    filterV0(obj: object): number {
         if (obj.key.startsWith(DbPrefixes.Replay)) {
             this.inReplayPrefix = true;
             return FILTER_SKIP;
@@ -189,7 +216,7 @@ class DelimiterVersions extends Delimiter {
      * @param {String} obj.value - The value of the element
      * @return {number} - indicates if iteration should continue
      */
-    filterV1(obj) {
+    filterV1(obj: object): number {
         // this function receives both M and V keys, but their prefix
         // length is the same so we can remove their prefix without
         // looking at the type of key
@@ -197,7 +224,7 @@ class DelimiterVersions extends Delimiter {
             obj.value);
     }

-    filterCommon(key, value) {
+    filterCommon(key: string, value: string): boolean {
         if (this.prefix && !key.startsWith(this.prefix)) {
             return FILTER_SKIP;
         }
@@ -230,7 +257,7 @@ class DelimiterVersions extends Delimiter {
         return this.addContents({ key: nonversionedKey, value, versionId });
     }

-    skippingV0() {
+    skippingV0(): string {
         if (this.inReplayPrefix) {
             return DbPrefixes.Replay;
         }
@@ -243,7 +270,7 @@ class DelimiterVersions extends Delimiter {
         return SKIP_NONE;
     }

-    skippingV1() {
+    skippingV1(): string {
         const skipV0 = this.skippingV0();
         if (skipV0 === SKIP_NONE) {
             return SKIP_NONE;
@@ -259,7 +286,7 @@ class DelimiterVersions extends Delimiter {
      *                  isn't truncated
      * @return {Object} - following amazon format
      */
-    result() {
+    result(): DelimiterVersionsResult {
         /* NextMarker is only provided when delimiter is used.
         * specified in v1 listing documentation
         * http://docs.aws.amazon.com/AmazonS3/latest/API/RESTBucketGET.html
@@ -276,4 +303,4 @@ class DelimiterVersions extends Delimiter {
     }
 }

-module.exports = { DelimiterVersions };
+export { DelimiterVersions };
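
To make the synchronized two-range listing concrete: for a simple prefix, genMDParamsV1 brackets one scan over master keys and one over version keys, using inc() for the exclusive upper bounds. A sketch of the expected pair (a hypothetical expectation, not library code):

    import { inc } from './tools';
    import { VersioningConstants as VSConst } from '../../versioning/constants';

    const { DbPrefixes } = VSConst;
    // For parameters { prefix: 'foo/' }, genMDParamsV1() returns:
    const expected = [
        { gte: DbPrefixes.Master + 'foo/', lt: DbPrefixes.Master + inc('foo/') },
        { gte: DbPrefixes.Version + 'foo/', lt: DbPrefixes.Version + inc('foo/') },
    ];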

@@ -1,9 +0,0 @@
-module.exports = {
-    Basic: require('./basic').List,
-    Delimiter: require('./delimiter').Delimiter,
-    DelimiterVersions: require('./delimiterVersions')
-        .DelimiterVersions,
-    DelimiterMaster: require('./delimiterMaster')
-        .DelimiterMaster,
-    MPU: require('./MPU').MultipartUploads,
-};

@@ -0,0 +1,13 @@
+import { List as Basic } from './basic';
+import { Delimiter } from './delimiter';
+import { DelimiterVersions } from './delimiterVersions';
+import { DelimiterMaster } from './delimiterMaster';
+import { MultipartUploads as MPU } from './MPU';
+
+export {
+    Basic,
+    Delimiter,
+    DelimiterVersions,
+    DelimiterMaster,
+    MPU,
+};
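
Callers can now pull algorithms off this barrel module with named imports; a small sketch (the relative path assumes a sibling module, and the cast papers over the still-loose parameter types):

    import { Basic, DelimiterMaster } from './exportAlgos'; // path assumed

    const lister = new Basic({ maxKeys: 100 } as any, console as any);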

@@ -1,10 +1,15 @@
-const assert = require('assert');
+import { strict as assert } from 'assert';

-const { FILTER_END, FILTER_SKIP, SKIP_NONE } = require('./tools');
+import { FILTER_END, FILTER_SKIP, SKIP_NONE } from './tools';

 const MAX_STREAK_LENGTH = 100;

+interface SkipParams {
+    extension: any;
+    gte: any;
+}
+
 /**
  * Handle the filtering and the skip mechanism of a listing result.
 */
@@ -15,14 +20,23 @@ class Skip {
      * @param {String} params.gte - current range gte (greater than or
      *                              equal) used by the client code
      */
-    constructor(params) {
+    extension: any;
+    gteParams: any;
+    listingEndCb?: Function;
+    skipRangeCb?: Function;
+    streakLength: number;
+
+    constructor(params: SkipParams) {
+        // TODO - once we're in strict TS everywhere, we no longer need these
+        // assertions
         assert(params.extension);

         this.extension = params.extension;
         this.gteParams = params.gte;
-        this.listingEndCb = null;
-        this.skipRangeCb = null;
+        this.listingEndCb = undefined;
+        this.skipRangeCb = undefined;

         /* Used to count consecutive FILTER_SKIP returned by the extension
          * filter method. Once this counter reaches MAX_STREAK_LENGTH, the
@@ -31,11 +45,11 @@ class Skip {
         this.streakLength = 0;
     }

-    setListingEndCb(cb) {
+    setListingEndCb(cb: Function) {
         this.listingEndCb = cb;
     }

-    setSkipRangeCb(cb) {
+    setSkipRangeCb(cb: Function) {
         this.skipRangeCb = cb;
     }
@@ -47,9 +61,9 @@ class Skip {
      * This function calls the listing end or the skip range callbacks if
      * needed.
      */
-    filter(entry) {
-        assert(this.listingEndCb);
-        assert(this.skipRangeCb);
+    filter(entry: object): undefined {
+        assert(this.listingEndCb !== undefined);
+        assert(this.skipRangeCb !== undefined);

         const filteringResult = this.extension.filter(entry);
         const skippingRange = this.extension.skipping();
@@ -73,7 +87,7 @@ class Skip {
         }
     }

-    _inc(str) {
+    _inc(str: string): string {
         if (!str) {
             return str;
         }
@@ -84,5 +98,7 @@ class Skip {
     }
 }

-module.exports = Skip;
+export {
+    Skip,
+    SkipParams
+}
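
A sketch of how Skip is wired up by calling code; the extension instance and both callbacks are illustrative stand-ins:

    import { Skip } from './skip';
    import { Delimiter } from './delimiter';

    const lister = new Delimiter({ delimiter: '/' } as any, console as any, 'v0');
    const skip = new Skip({ extension: lister, gte: undefined });
    skip.setListingEndCb(() => { /* end the underlying read stream */ });
    skip.setSkipRangeCb((range: string) => { /* re-seek the stream at `range` */ });
    skip.filter({ key: 'photos/a.jpg', value: '{}' });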

View File

@@ -1,4 +1,6 @@
-const { DbPrefixes } = require('../../versioning/constants').VersioningConstants;
+import { VersioningConstants } from '../../versioning/constants';
+const DbPrefixes = VersioningConstants.DbPrefixes;
 // constants for extensions
 const SKIP_NONE = undefined; // to be inline with the values of NextMarker
@@ -15,8 +17,8 @@ const FILTER_END = -1;
  * @param {Number} limit - The limit to respect
  * @return {Number} - The parsed number || limit
  */
-function checkLimit(number, limit) {
-    const parsed = Number.parseInt(number, 10);
+function checkLimit(str: string, limit: number): number {
+    const parsed = Number.parseInt(str, 10);
     const valid = !Number.isNaN(parsed) && (!limit || parsed <= limit);
     return valid ? parsed : limit;
 }
@@ -28,7 +30,7 @@ function checkLimit(number, limit) {
  * @return {string} - the incremented string
  * or the input if it is not valid
  */
-function inc(str) {
+function inc(str: string): string {
     return str ? (str.slice(0, str.length - 1) +
         String.fromCharCode(str.charCodeAt(str.length - 1) + 1)) : str;
 }
@@ -40,7 +42,7 @@ function inc(str) {
  * @param {object} v0params - listing parameters for v0 format
  * @return {object} - listing parameters for v1 format
  */
-function listingParamsMasterKeysV0ToV1(v0params) {
+function listingParamsMasterKeysV0ToV1(v0params: any): any {
     const v1params = Object.assign({}, v0params);
     if (v0params.gt !== undefined) {
         v1params.gt = `${DbPrefixes.Master}${v0params.gt}`;
@@ -59,7 +61,7 @@ function listingParamsMasterKeysV0ToV1(v0params) {
     return v1params;
 }
-module.exports = {
+export {
     checkLimit,
     inc,
     listingParamsMasterKeysV0ToV1,
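
For reference, the behaviour these helpers encode, as a hedged sketch (DbPrefixes.Master is assumed here to be the v1 master-key prefix from the versioning constants):

    import { checkLimit, inc, listingParamsMasterKeysV0ToV1 }
        from './lib/algos/list/tools';

    // checkLimit parses a client-supplied string and caps it at the limit.
    checkLimit('500', 1000);  // => 500
    checkLimit('5000', 1000); // => 1000 (over the cap)
    checkLimit('abc', 1000);  // => 1000 (unparseable)

    // inc bumps the last character, giving the tightest upper bound for a
    // prefix: every key starting with 'foo' sorts strictly below 'fop'.
    inc('foo'); // => 'fop'

    // V0 master-key params gain the v1 prefix on each range bound.
    listingParamsMasterKeysV0ToV1({ gt: 'a', limit: 10 });
    // => { gt: `${DbPrefixes.Master}a`, limit: 10 }
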


@@ -91,7 +91,7 @@ class Vault {
             requestContext: serializedRCsArr,
         },
         (err, userInfo) => vaultSignatureCb(err, userInfo,
-            params.log, callback)
+            params.log, callback),
         );
     }
@@ -146,7 +146,7 @@ class Vault {
             requestContext: serializedRCs,
         },
         (err, userInfo) => vaultSignatureCb(err, userInfo,
-            params.log, callback, streamingV4Params)
+            params.log, callback, streamingV4Params),
         );
     }
@@ -232,28 +232,28 @@ class Vault {
      */
     getAccountIds(canonicalIDs, log, callback) {
         log.trace('getting accountIds from Vault based on canonicalIDs',
             { canonicalIDs });
         this.client.getAccountIds(canonicalIDs,
             { reqUid: log.getSerializedUids() },
             (err, info) => {
                 if (err) {
                     log.debug('received error message from vault',
                         { errorMessage: err });
                     return callback(err);
                 }
                 const infoFromVault = info.message.body;
                 log.trace('info received from vault', { infoFromVault });
                 const result = {};
                 /* If the accountId was not found in Vault, do not
                    send the canonicalID back to the API */
                 Object.keys(infoFromVault).forEach(key => {
                     if (infoFromVault[key] !== 'NotFound' &&
                         infoFromVault[key] !== 'WrongFormat') {
                         result[key] = infoFromVault[key];
                     }
                 });
                 return callback(null, result);
             });
     }
 /** checkPolicies -- call Vault to evaluate policies


@@ -74,7 +74,7 @@ function extractParams(request, log, awsService, data) {
         version = 'v4';
     } else {
         log.trace('invalid authorization security header',
             { header: authHeader });
         return { err: errors.AccessDenied };
     }
 } else if (data.Signature) {
@@ -89,7 +89,7 @@ function extractParams(request, log, awsService, data) {
 if (version !== null && method !== null) {
     if (!checkFunctions[version] || !checkFunctions[version][method]) {
         log.trace('invalid auth version or method',
             { version, authMethod: method });
         return { err: errors.NotImplemented };
     }
     log.trace('identified auth method', { version, authMethod: method });
@@ -159,7 +159,7 @@ function doAuth(request, log, cb, awsService, requestContexts) {
  * @return {undefined}
  */
 function generateV4Headers(request, data, accessKey, secretKeyValue,
     awsService, proxyPath) {
     Object.assign(request, { headers: {} });
     const amzDate = convertUTCtoISO8601(Date.now());
     // get date without time
@@ -187,16 +187,16 @@ function generateV4Headers(request, data, accessKey, secretKeyValue,
         .filter(headerName =>
             headerName.startsWith('x-amz-')
             || headerName.startsWith('x-scal-')
-            || headerName === 'host'
+            || headerName === 'host',
         ).sort().join(';');
     const params = { request, signedHeaders, payloadChecksum,
         credentialScope, timestamp, query: data,
         awsService: service, proxyPath };
     const stringToSign = constructStringToSignV4(params);
     const signingKey = vaultUtilities.calculateSigningKey(secretKeyValue,
         region,
         scopeDate,
         service);
     const signature = crypto.createHmac('sha256', signingKey)
         .update(stringToSign, 'binary').digest('hex');
     const authorizationHeader = `${algorithm} Credential=${accessKey}` +
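
The signing flow above ends in the standard SigV4 HMAC chain. A sketch of the derivation that vaultUtilities.calculateSigningKey performs, written from the AWS SigV4 specification rather than copied from this repo:

    import * as crypto from 'crypto';

    function hmac(key: crypto.BinaryLike, data: string): Buffer {
        return crypto.createHmac('sha256', key).update(data, 'binary').digest();
    }

    // scopeDate is the YYYYMMDD date carried in the credential scope.
    function calculateSigningKey(secretKey: string, region: string,
        scopeDate: string, service = 's3'): Buffer {
        const kDate = hmac(`AWS4${secretKey}`, scopeDate);
        const kRegion = hmac(kDate, region);
        const kService = hmac(kRegion, service);
        return hmac(kService, 'aws4_request');
    }

    // The signature is then HMAC(signingKey, stringToSign), hex-encoded, and
    // assembled into `${algorithm} Credential=${accessKey}/${scope}, ...`
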


@@ -29,7 +29,7 @@ class ChainBackend extends BaseBackend {
             typeof client.getCanonicalIds === 'function' &&
             typeof client.getEmailAddresses === 'function' &&
             typeof client.checkPolicies === 'function' &&
-            typeof client.healthcheck === 'function'
+            typeof client.healthcheck === 'function',
         ), 'invalid client: missing required auth backend methods');
         this._clients = clients;
     }
@@ -55,7 +55,7 @@ class ChainBackend extends BaseBackend {
             signatureFromRequest,
             accessKey,
             options,
-            done
+            done,
         ), callback);
     }
@@ -67,7 +67,7 @@ class ChainBackend extends BaseBackend {
             region,
             scopeDate,
             options,
-            done
+            done,
         ), callback);
     }
@@ -151,7 +151,7 @@ class ChainBackend extends BaseBackend {
             requestContextParams,
             userArn,
             options,
-            done
+            done,
         ), (err, res) => {
             if (err) {
                 return callback(err);
@@ -169,8 +169,8 @@ class ChainBackend extends BaseBackend {
             client.healthcheck(reqUid, (err, res) => done(null, {
                 error: !!err ? err : null,
                 status: res,
-            })
+            }),
         ), (err, res) => {
             if (err) {
                 return callback(err);
             }
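
These hunks only add trailing commas, but they show the chaining pattern: each call fans out over this._clients and the first client to succeed wins. A generic sketch of that idea, illustrative only and not the repo's implementation:

    // Try each backend in order; resolve with the first success, reject with
    // the last error if every client fails (the tryEach pattern).
    async function tryEach<T>(attempts: Array<() => Promise<T>>): Promise<T> {
        let lastErr: unknown;
        for (const attempt of attempts) {
            try {
                return await attempt();
            } catch (err) {
                lastErr = err;
            }
        }
        throw lastErr;
    }

    // e.g. authenticate against several Vault-compatible backends:
    // tryEach(backends.map(b => () => b.verifySignatureV4(...args)));
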


@@ -26,20 +26,20 @@ class AuthLoader {
             .required();
         const accountsJoi = joi.array()
             .items({
                 name: joi.string().required(),
                 email: joi.string().email().required(),
                 arn: joi.string().required(),
                 canonicalID: joi.string().required(),
                 shortid: joi.string().regex(/^[0-9]{12}$/).required(),
                 keys: this._joiKeysValidator,
                 // backward-compat
                 users: joi.array(),
             })
             .required()
             .unique('arn')
             .unique('email')
             .unique('canonicalID');
         this._joiValidator = joi.object({ accounts: accountsJoi });
     }
@@ -136,7 +136,7 @@ class AuthLoader {
     _validateData(authData, filePath) {
         const res = joi.validate(authData, this._joiValidator,
             { abortEarly: false });
         if (res.error) {
             this._dumpJoiErrors(res.error.details, filePath);
             return false;
@@ -156,7 +156,7 @@ class AuthLoader {
                     'master/conf/authdata.json). Also note that support ' +
                     'for account users has been dropped.',
                     { accountName: account.name, accountArn: account.arn,
                         filePath });
                 arnError = true;
                 return;
             }
@@ -167,7 +167,7 @@ class AuthLoader {
                     'https://github.com/scality/S3/blob/master/conf/' +
                     'authdata.json)',
                     { accountName: account.name, accountArn: account.arn,
                         filePath });
                 arnError = true;
                 return;
             }
@@ -176,8 +176,8 @@ class AuthLoader {
                 this._log.error(
                     'authentication config validation error',
                     { reason: arnObj.error.description,
                         accountName: account.name, accountArn: account.arn,
                         filePath });
                 arnError = true;
                 return;
             }
@@ -185,8 +185,8 @@ class AuthLoader {
                 this._log.error(
                     'authentication config validation error',
                     { reason: 'not an IAM account ARN',
                         accountName: account.name, accountArn: account.arn,
                         filePath });
                 arnError = true;
                 return;
             }
@@ -215,7 +215,7 @@ class AuthLoader {
                 logInfo.context = err.context;
             }
             this._log.error('authentication config validation error',
                 logInfo);
         });
     }
 }
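
A minimal authdata document that satisfies the schema above, as a sketch (the key shape is guessed, since _joiKeysValidator is not shown in this diff; `validator` stands in for the schema built in the constructor):

    const authData = {
        accounts: [{
            name: 'Bart',
            email: 'sampleaccount1@sampling.com',
            arn: 'arn:aws:iam::123456789012:root',
            canonicalID: '79a59df900b949e55d96a1e6...', // 64-hex ID, shortened
            shortid: '123456789012',
            keys: [{ access: 'accessKey1', secret: 'verySecretKey1' }],
        }],
    };
    // abortEarly: false reports every violation, including duplicate
    // arn/email/canonicalID caught by .unique(), instead of stopping at the
    // first one.
    const res = joi.validate(authData, validator, { abortEarly: false });
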


@@ -41,7 +41,7 @@ function getCanonicalizedAmzHeaders(headers, clientType) {
     // Build headerString
     return amzHeaders.reduce((headerStr, current) =>
         `${headerStr}${current[0]}:${current[1]}\n`,
         '');
 }
 module.exports = getCanonicalizedAmzHeaders;
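
The reduce builds the newline-terminated canonical block that v2 signing expects. For example (a sketch, assuming the headers have already been lower-cased and sorted as the function's earlier steps do):

    const amzHeaders = [
        ['x-amz-date', 'Thu, 13 Jan 2022 15:14:13 +0000'],
        ['x-amz-meta-color', 'blue'],
    ];
    amzHeaders.reduce((headerStr, current) =>
        `${headerStr}${current[0]}:${current[1]}\n`, '');
    // => 'x-amz-date:Thu, 13 Jan 2022 15:14:13 +0000\nx-amz-meta-color:blue\n'
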


@@ -22,9 +22,9 @@ function check(request, log, data) {
     timestamp = Date.parse(timestamp);
     if (!timestamp) {
         log.debug('missing or invalid date header',
             { method: 'auth/v2/headerAuthCheck.check' });
         return { err: errors.AccessDenied.
             customizeDescription('Authentication requires a valid Date or ' +
             'x-amz-date header') };
     }


@@ -42,12 +42,12 @@ function check(request, log, data) {
     if (expirationTime > currentTime + preSignedURLExpiry) {
         log.debug('expires parameter too far in future',
             { expires: request.query.Expires });
         return { err: errors.AccessDenied };
     }
     if (currentTime > expirationTime) {
         log.debug('current time exceeds expires time',
             { expires: request.query.Expires });
         return { err: errors.RequestTimeTooSkewed };
     }
     const accessKey = data.AWSAccessKeyId;
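
The two guards bound a v2 pre-signed URL's lifetime from both sides. In sketch form (the unit conversion is an assumption: S3 v2 pre-signed URLs carry Expires as epoch seconds, and preSignedURLExpiry is the configured maximum window in milliseconds):

    const currentTime = Date.now();
    const expirationTime = Number(request.query.Expires) * 1000; // s -> ms

    // 1. reject URLs minted to live longer than the allowed window
    const tooFarInFuture = expirationTime > currentTime + preSignedURLExpiry;
    // 2. reject URLs whose deadline has already passed
    const alreadyExpired = currentTime > expirationTime;
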


@@ -88,14 +88,14 @@ function check(request, log, data, awsService) {
     }
     if (!timestamp) {
         log.debug('missing or invalid date header',
             { method: 'auth/v4/headerAuthCheck.check' });
         return { err: errors.AccessDenied.
             customizeDescription('Authentication requires a valid Date or ' +
             'x-amz-date header') };
     }
     const validationResult = validateCredentials(credentialsArr, timestamp,
         log);
     if (validationResult instanceof Error) {
         log.debug('credentials in improper format', { credentialsArr,
             timestamp, validationResult });
@@ -134,7 +134,7 @@ function check(request, log, data, awsService) {
     } catch (err) {
         log.debug('invalid proxy_path header', { proxyPath, err });
         return { err: errors.InvalidArgument.customizeDescription(
             'invalid proxy_path header') };
     }
 }


@@ -45,7 +45,7 @@ function check(request, log, data) {
     }
     const validationResult = validateCredentials(credential, timestamp,
         log);
     if (validationResult instanceof Error) {
         log.debug('credentials in improper format', { credential,
             timestamp, validationResult });
@@ -69,7 +69,7 @@ function check(request, log, data) {
     } catch (err) {
         log.debug('invalid proxy_path header', { proxyPath });
         return { err: errors.InvalidArgument.customizeDescription(
             'invalid proxy_path header') };
     }
 }


@@ -273,7 +273,7 @@ class V4Transform extends Transform {
             }
             // get next chunk
             return callback();
-        }
+        },
         );
     }
 }


@@ -25,20 +25,20 @@ function validateCredentials(credentials, timestamp, log) {
         log.warn('accessKey provided is wrong format', { accessKey });
         return errors.InvalidArgument;
     }
     // The scope date (format YYYYMMDD) must be same date as the timestamp
     // on the request from the x-amz-date param (if queryAuthCheck)
     // or from the x-amz-date header or date header (if headerAuthCheck)
     // Format of timestamp is ISO 8601: YYYYMMDDTHHMMSSZ.
     // http://docs.aws.amazon.com/AmazonS3/latest/API/
     // sigv4-query-string-auth.html
     // http://docs.aws.amazon.com/general/latest/gr/
     // sigv4-date-handling.html
     // convert timestamp to format of scopeDate YYYYMMDD
     const timestampDate = timestamp.split('T')[0];
     if (scopeDate.length !== 8 || scopeDate !== timestampDate) {
         log.warn('scope date must be the same date as the timestamp date',
             { scopeDate, timestampDate });
         return errors.RequestTimeTooSkewed;
     }
     if (service !== 's3' && service !== 'iam' && service !== 'ring' &&
@@ -50,7 +50,7 @@ function validateCredentials(credentials, timestamp, log) {
     }
     if (requestType !== 'aws4_request') {
         log.warn('requestType contained in params is not aws4_request',
             { requestType });
         return errors.InvalidArgument;
     }
     return {};
@@ -68,7 +68,7 @@ function extractQueryParams(queryObj, log) {
     // Do not need the algorithm sent back
     if (queryObj['X-Amz-Algorithm'] !== 'AWS4-HMAC-SHA256') {
         log.warn('algorithm param incorrect',
             { algo: queryObj['X-Amz-Algorithm'] });
         return authParams;
     }
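
Concretely, the scope-date check splits the ISO 8601 timestamp and compares it to the credential scope. A worked example with invented values:

    const timestamp = '20220113T151413Z';            // x-amz-date
    const credential = 'AKIDEXAMPLE/20220113/us-east-1/s3/aws4_request';
    const [, scopeDate, region, service, requestType] = credential.split('/');

    const timestampDate = timestamp.split('T')[0];   // '20220113'
    scopeDate === timestampDate;                     // true: dates agree
    requestType === 'aws4_request';                  // true: valid scope
    // A mismatch (say, scopeDate '20220112') returns RequestTimeTooSkewed.
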


@@ -1,5 +1,6 @@
 'use strict'; // eslint-disable-line strict
-const crypto = require('crypto');
+import { createHash } from 'crypto';
 // The min value here is to manage further backward compat if we
 // need it
@@ -10,7 +11,7 @@
     new RegExp(`^[a-f0-9]{${iamSecurityTokenSizeMin},` +
                `${iamSecurityTokenSizeMax}}$`);
-module.exports = {
+export default {
     // info about the iam security token
     iamSecurityToken: {
         min: iamSecurityTokenSizeMin,
@@ -92,7 +93,7 @@ module.exports = {
     replicationBackends: { aws_s3: true, azure: true, gcp: true },
     // hex digest of sha256 hash of empty string:
-    emptyStringHash: crypto.createHash('sha256')
+    emptyStringHash: createHash('sha256')
         .update('', 'binary').digest('hex'),
     mpuMDStoredExternallyBackend: { aws_s3: true, gcp: true },
     // AWS sets a minimum size limit for parts except for the last part.
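
For the record, that constant evaluates to the well-known SHA-256 digest of the empty string, the value SigV4 uses as the payload checksum for zero-byte bodies:

    import { createHash } from 'crypto';

    createHash('sha256').update('', 'binary').digest('hex');
    // => 'e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855'
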

lib/db.js

@@ -1,182 +0,0 @@
'use strict'; // eslint-disable-line strict
const writeOptions = { sync: true };
/**
* Like Error, but with a property set to true.
* TODO: this is copied from kineticlib, should consolidate with the
* future errors module
*
* Example: instead of:
* const err = new Error("input is not a buffer");
* err.badTypeInput = true;
* throw err;
* use:
* throw propError("badTypeInput", "input is not a buffer");
*
* @param {String} propName - the property name.
* @param {String} message - the Error message.
* @returns {Error} the Error object.
*/
function propError(propName, message) {
const err = new Error(message);
err[propName] = true;
return err;
}
/**
* Running transaction with multiple updates to be committed atomically
*/
class IndexTransaction {
/**
* Builds a new transaction
*
* @argument {Leveldb} db an open database to which the updates
* will be applied
*
* @returns {IndexTransaction} a new empty transaction
*/
constructor(db) {
this.operations = [];
this.db = db;
this.closed = false;
this.conditions = [];
}
/**
* Adds a new operation to participate in this running transaction
*
* @argument {object} op an object with the following attributes:
* {
* type: 'put' or 'del',
* key: the object key,
* value: (optional for del) the value to store,
* }
*
* @throws {Error} an error described by the following properties
* - invalidTransactionVerb if op is not put or del
* - pushOnCommittedTransaction if already committed
* - missingKey if the key is missing from the op
* - missingValue if putting without a value
*
* @returns {undefined}
*/
push(op) {
if (this.closed) {
throw propError('pushOnCommittedTransaction',
'can not add ops to already committed transaction');
}
if (op.type !== 'put' && op.type !== 'del') {
throw propError('invalidTransactionVerb',
`unknown action type: ${op.type}`);
}
if (op.key === undefined) {
throw propError('missingKey', 'missing key');
}
if (op.type === 'put' && op.value === undefined) {
throw propError('missingValue', 'missing value');
}
this.operations.push(op);
}
/**
* Adds a new put operation to this running transaction
*
* @argument {string} key - the key of the object to put
* @argument {string} value - the value to put
*
* @throws {Error} an error described by the following properties
* - pushOnCommittedTransaction if already committed
* - missingKey if the key is missing from the op
* - missingValue if putting without a value
*
* @returns {undefined}
*
* @see push
*/
put(key, value) {
this.push({ type: 'put', key, value });
}
/**
* Adds a new del operation to this running transaction
*
* @argument {string} key - the key of the object to delete
*
* @throws {Error} an error described by the following properties
* - pushOnCommittedTransaction if already committed
* - missingKey if the key is missing from the op
*
* @returns {undefined}
*
* @see push
*/
del(key) {
this.push({ type: 'del', key });
}
/**
* Adds a condition for the transaction
*
* @argument {object} condition an object with the following attributes:
* {
* <condition>: the object key
* }
* example: { notExists: 'key1' }
*
* @throws {Error} an error described by the following properties
* - pushOnCommittedTransaction if already committed
* - missingCondition if the condition is empty
*
* @returns {undefined}
*/
addCondition(condition) {
if (this.closed) {
throw propError('pushOnCommittedTransaction',
'can not add conditions to already committed transaction');
}
if (condition === undefined || Object.keys(condition).length === 0) {
throw propError('missingCondition', 'missing condition for conditional put');
}
if (typeof (condition.notExists) !== 'string') {
throw propError('unsupportedConditionalOperation', 'missing key or supported condition');
}
this.conditions.push(condition);
}
/**
* Applies the queued updates in this transaction atomically.
*
* @argument {function} cb function to be called when the commit
* finishes, taking an optional error argument
*
* @returns {undefined}
*/
commit(cb) {
if (this.closed) {
return cb(propError('alreadyCommitted',
'transaction was already committed'));
}
if (this.operations.length === 0) {
return cb(propError('emptyTransaction',
'tried to commit an empty transaction'));
}
this.closed = true;
writeOptions.conditions = this.conditions;
// The array-of-operations variant of the `batch` method
        // allows passing options such as `sync: true` whereas the
// chained form does not.
return this.db.batch(this.operations, writeOptions, cb);
}
}
module.exports = {
IndexTransaction,
};
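
lib/db.js goes away with the leveldb deprecation. For readers tracking the removal, this is how the dropped IndexTransaction surface was typically driven, sketched against the documented API above (the `db` handle is an open leveldb instance):

    import { IndexTransaction } from './lib/db'; // the module removed here

    const transaction = new IndexTransaction(db);
    transaction.push({ type: 'put', key: 'foo', value: 'bar' });
    transaction.put('baz', 'qux');          // shorthand for the push above
    transaction.del('stale-key');
    transaction.addCondition({ notExists: 'foo' }); // conditional write
    transaction.commit(err => {
        // errors carry the propError flags documented above, e.g.
        // err.emptyTransaction or err.alreadyCommitted
    });
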


@@ -1,4 +1,4 @@
-function reshapeExceptionError(error) {
+export function reshapeExceptionError(error: any) {
     const { message, code, stack, name } = error;
     return {
         message,
@@ -7,7 +7,3 @@ function reshapeExceptionError(error) {
         name,
     };
 }
-module.exports = {
-    reshapeExceptionError,
-};
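
Usage stays the same apart from the named import; the helper flattens a thrown exception into a plain loggable object:

    import { reshapeExceptionError } from './lib/errorUtils';

    try {
        JSON.parse('{oops');
    } catch (err) {
        // { message, code, stack, name }: safe to hand to a JSON logger
        console.error('parse failed', reshapeExceptionError(err));
    }
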


@@ -100,6 +100,8 @@ class ArsenalError extends Error {
 const errors = ArsenalError.errorMap
+export type { ArsenalError };
 export default {
     ...errors
 };


@@ -17,9 +17,9 @@ describe('decyrptSecret', () => {
 describe('parseServiceCredentials', () => {
     const conf = {
         users: [{ accessKey,
             accountType: 'service-clueso',
             secretKey,
             userName: 'Search Service Account' }],
     };
     const auth = JSON.stringify({ privateKey });


@@ -1,6 +1,8 @@
 'use strict'; // eslint-disable-line
-const debug = require('util').debuglog('jsutil');
+import { debuglog } from 'util';
+
+const debug = debuglog('jsutil');
 // JavaScript utility functions
@@ -17,9 +19,9 @@ const debug = require('util').debuglog('jsutil');
  * @return {function} a callable wrapper mirroring <tt>func</tt> but
  * only calls <tt>func</tt> at first invocation.
  */
-module.exports.once = function once(func) {
+export function once(func: Function): Function {
     const state = { called: false, res: undefined };
-    return function wrapper(...args) {
+    return function wrapper(...args: any) {
         if (!state.called) {
             state.called = true;
             state.res = func.apply(func, args);
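
The wrapper memoizes the first call; later calls return the stored result (and, per the debuglog import, can report the re-entry). For instance:

    import { once } from './lib/jsutil';

    const closeOnce = once((code: number) => {
        console.log('closing with', code);
        return code;
    });

    closeOnce(0); // runs the function, logs 'closing with 0', returns 0
    closeOnce(1); // no-op: returns the first result without calling again
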


@@ -17,7 +17,7 @@ class RedisClient {
                 method: 'RedisClient.constructor',
                 redisHost: config.host,
                 redisPort: config.port,
-            })
+            }),
         );
         return this;
     }


@@ -9,7 +9,6 @@ const StatsClient = require('./StatsClient');
  * rather than by seconds
  */
 class StatsModel extends StatsClient {
-
     /**
      * Utility method to convert 2d array rows to columns, and vice versa
      * See also: https://docs.ruby-lang.org/en/2.0.0/Array.html#method-i-zip


@@ -2,8 +2,8 @@ const promClient = require('prom-client');
 const collectDefaultMetricsIntervalMs =
     process.env.COLLECT_DEFAULT_METRICS_INTERVAL_MS !== undefined ?
         Number.parseInt(process.env.COLLECT_DEFAULT_METRICS_INTERVAL_MS, 10) :
         10000;
 promClient.collectDefaultMetrics({ timeout: collectDefaultMetricsIntervalMs });


@@ -27,7 +27,7 @@ class ARN {
     static createFromString(arnStr) {
         const [arn, partition, service, region, accountId,
             resourceType, resource] = arnStr.split(':');
         if (arn !== 'arn') {
             return { error: errors.InvalidArgument.customizeDescription(
@@ -58,7 +58,7 @@ class ARN {
                 'must be a 12-digit number or "*"') };
         }
         const fullResource = (resource !== undefined ?
             `${resourceType}:${resource}` : resourceType);
         return new ARN(partition, service, region, accountId, fullResource);
     }
@@ -98,7 +98,7 @@ class ARN {
     toString() {
         return ['arn', this.getPartition(), this.getService(),
             this.getRegion(), this.getAccountId(), this.getResource()]
             .join(':');
     }
 }
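
The six-way destructuring explains the resourceType:resource recombination: a seventh colon-separated field only exists when the resource itself contains a colon. For example:

    'arn:aws:iam::123456789012:role/backbeat'.split(':');
    // => ['arn', 'aws', 'iam', '', '123456789012', 'role/backbeat']
    //    resourceType = 'role/backbeat', resource = undefined

    'arn:aws:s3:::my-bucket/prefix:with-colon'.split(':');
    // => ['arn', 'aws', 's3', '', '', 'my-bucket/prefix', 'with-colon']
    //    fullResource reassembles 'my-bucket/prefix:with-colon'
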


@@ -52,9 +52,9 @@ class BackendInfo {
      */
     static isRequestEndpointPresent(config, requestEndpoint, log) {
         if (Object.keys(config.restEndpoints).
             indexOf(requestEndpoint) < 0) {
             log.trace('requestEndpoint does not match config restEndpoints',
                 { requestEndpoint });
             return false;
         }
         return true;
@@ -70,10 +70,10 @@ class BackendInfo {
      */
     static isRequestEndpointValueValid(config, requestEndpoint, log) {
         if (Object.keys(config.locationConstraints).
             indexOf(config.restEndpoints[requestEndpoint]) < 0) {
             log.trace('the default locationConstraint for request' +
                 'Endpoint does not match any config locationConstraint',
                 { requestEndpoint });
             return false;
         }
         return true;
@@ -110,7 +110,7 @@ class BackendInfo {
      */
     static isValidRequestEndpointOrBackend(config, requestEndpoint, log) {
         if (!BackendInfo.isRequestEndpointPresent(config, requestEndpoint,
             log)) {
             return BackendInfo.isMemOrFileBackend(config, log);
         }
         return BackendInfo.isRequestEndpointValueValid(config, requestEndpoint,
@@ -132,7 +132,7 @@ class BackendInfo {
         bucketLocationConstraint, requestEndpoint, log) {
         if (objectLocationConstraint) {
             if (BackendInfo.isValidLocationConstraint(config,
                 objectLocationConstraint, log)) {
                 log.trace('objectLocationConstraint is valid');
                 return { isValid: true };
             }
@@ -143,7 +143,7 @@ class BackendInfo {
         }
         if (bucketLocationConstraint) {
             if (BackendInfo.isValidLocationConstraint(config,
                 bucketLocationConstraint, log)) {
                 log.trace('bucketLocationConstraint is valid');
                 return { isValid: true };
             }
@@ -159,7 +159,7 @@ class BackendInfo {
             return { isValid: true, legacyLocationConstraint };
         }
         if (!BackendInfo.isValidRequestEndpointOrBackend(config,
             requestEndpoint, log)) {
             return { isValid: false, description: 'Endpoint Location Error - ' +
                 `Your endpoint "${requestEndpoint}" is not in restEndpoints ` +
                 'in your config OR the default location constraint for request ' +
@@ -167,7 +167,7 @@ class BackendInfo {
                 'match any config locationConstraint - Please update.' };
         }
         if (BackendInfo.isRequestEndpointPresent(config, requestEndpoint,
             log)) {
             return { isValid: true };
         }
         return { isValid: true, defaultedToDataBackend: true };


@@ -69,13 +69,13 @@ class BucketInfo {
      * @param {object} [notificationConfiguration] - bucket notification configuration
      */
     constructor(name, owner, ownerDisplayName, creationDate,
         mdBucketModelVersion, acl, transient, deleted,
         serverSideEncryption, versioningConfiguration,
         locationConstraint, websiteConfiguration, cors,
         replicationConfiguration, lifecycleConfiguration,
         bucketPolicy, uid, readLocationConstraint, isNFS,
         ingestionConfig, azureInfo, objectLockEnabled,
         objectLockConfiguration, notificationConfiguration) {
         assert.strictEqual(typeof name, 'string');
         assert.strictEqual(typeof owner, 'string');
         assert.strictEqual(typeof ownerDisplayName, 'string');
@@ -94,7 +94,7 @@ class BucketInfo {
         if (serverSideEncryption) {
             assert.strictEqual(typeof serverSideEncryption, 'object');
             const { cryptoScheme, algorithm, masterKeyId,
                 configuredMasterKeyId, mandatory } = serverSideEncryption;
             assert.strictEqual(typeof cryptoScheme, 'number');
             assert.strictEqual(typeof algorithm, 'string');
             assert.strictEqual(typeof masterKeyId, 'string');


@@ -379,7 +379,7 @@ class LifecycleConfiguration {
             if (!tags[i].Key || !tags[i].Value) {
                 tagObj.error =
                     errors.MissingRequiredParameter.customizeDescription(
                         'Tag XML does not contain both Key and Value');
                 break;
             }
@@ -927,7 +927,7 @@ class LifecycleConfiguration {
             const daysInt = parseInt(subExp.Days[0], 10);
             if (daysInt < 1) {
                 expObj.error = errors.InvalidArgument.customizeDescription(
                     'Expiration days is not a positive integer');
             } else {
                 expObj.days = daysInt;
             }
@@ -1123,10 +1123,10 @@ class LifecycleConfiguration {
             const { noncurrentDays, storageClass } = transition;
             xml.push(
                 `<${actionName}>`,
                 `<NoncurrentDays>${noncurrentDays}` +
                     '</NoncurrentDays>',
                 `<StorageClass>${storageClass}</StorageClass>`,
-                `</${actionName}>`
+                `</${actionName}>`,
             );
         });
         Action = xml.join('');
@@ -1144,9 +1144,9 @@ class LifecycleConfiguration {
             }
             xml.push(
                 `<${actionName}>`,
                 element,
                 `<StorageClass>${storageClass}</StorageClass>`,
-                `</${actionName}>`
+                `</${actionName}>`,
             );
         });
         Action = xml.join('');


@@ -27,7 +27,7 @@ const errors = require('../errors');
  * </NotificationConfiguration>
  */
 /**
  * Format of config:
  *
  * config = {


@@ -17,7 +17,7 @@ const errors = require('../errors');
  * </ObjectLockConfiguration>
  */
 /**
  * Format of config:
  *
  * config = {


@@ -10,7 +10,6 @@ const ObjectMDLocation = require('./ObjectMDLocation');
  * mpuPart metadata for example)
  */
 class ObjectMD {
-
     /**
      * Create a new instance of ObjectMD. Parameter <tt>objMd</tt> is
      * reserved for internal use, users should call
@@ -148,7 +147,7 @@ class ObjectMD {
         Object.assign(this._data, objMd._data);
         Object.assign(this._data.replicationInfo,
             objMd._data.replicationInfo);
     }
     _updateFromParsedJSON(objMd) {


@@ -3,7 +3,6 @@
  * 'location' array
  */
 class ObjectMDLocation {
-
     /**
      * @constructor
      * @param {object} locationObj - single data location info


@@ -111,7 +111,7 @@ class RoundRobin {
     pickHost() {
         if (this.logger) {
             this.logger.debug('pick host',
                 { host: this.getCurrentHost() });
         }
         const curHost = this.getCurrentHost();
         ++this.pickCount;
@@ -163,7 +163,7 @@ class RoundRobin {
         }
         if (this.logger) {
             this.logger.debug('round robin host',
                 { newHost: this.getCurrentHost() });
         }
     }
 }


@@ -10,7 +10,6 @@ const { checkSupportIPv6 } = require('./utils');
 class Server {
-
     /**
      * @constructor
      *
@@ -431,16 +430,16 @@ class Server {
             // Setting no delay of the socket to the value configured
             sock.setNoDelay(this.isNoDelay());
             sock.on('error', err => this._logger.info(
                 'socket error - request rejected', { error: err }));
         });
         this._server.on('tlsClientError', (err, sock) =>
             this._onClientError(err, sock));
         this._server.on('clientError', (err, sock) =>
             this._onClientError(err, sock));
         this._server.on('checkContinue', (req, res) =>
             this._onCheckContinue(req, res));
         this._server.on('checkExpectation', (req, res) =>
             this._onCheckExpectation(req, res));
         this._server.on('listening', () => this._onListening());
     }
     this._server.listen(this._port, this._address);


@@ -72,8 +72,8 @@ function getByteRangeFromSpec(rangeSpec, objectSize) {
     if (rangeSpec.start < objectSize) {
         // test is false if end is undefined
         return { range: [rangeSpec.start,
             (rangeSpec.end < objectSize ?
                 rangeSpec.end : objectSize - 1)] };
     }
     return { error: errors.InvalidRange };
 }
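
Worked examples of that clamping rule (standard HTTP Range semantics; the values are invented):

    // objectSize = 1000
    getByteRangeFromSpec({ start: 0, end: 499 }, 1000);
    // => { range: [0, 499] }           // fully inside the object
    getByteRangeFromSpec({ start: 500, end: 9999 }, 1000);
    // => { range: [500, 999] }         // end clamped to objectSize - 1
    getByteRangeFromSpec({ start: 0 }, 1000);
    // => { range: [0, 999] }           // open-ended range
    getByteRangeFromSpec({ start: 1000 }, 1000);
    // => { error: errors.InvalidRange } // start beyond the last byte
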


@@ -90,8 +90,8 @@ function _negotiateProtocolVersion(client, logger, cb) {
         if (err) {
             const error = _arsenalError(err);
             logger.error('KMIP::negotiateProtocolVersion',
                 { error,
                   vendorIdentification: client.vendorIdentification });
             return cb(error);
         }
         const majorVersions =
@@ -102,8 +102,8 @@ function _negotiateProtocolVersion(client, logger, cb) {
             majorVersions.length !== minorVersions.length) {
             const error = _arsenalError('No suitable protocol version');
             logger.error('KMIP::negotiateProtocolVersion',
                 { error,
                   vendorIdentification: client.vendorIdentification });
             return cb(error);
         }
         client.kmip.changeProtocolVersion(majorVersions[0], minorVersions[0]);
@@ -126,8 +126,8 @@ function _mapExtensions(client, logger, cb) {
         if (err) {
             const error = _arsenalError(err);
             logger.error('KMIP::mapExtensions',
                 { error,
                   vendorIdentification: client.vendorIdentification });
             return cb(error);
         }
         const extensionNames = response.lookup(searchFilter.extensionName);
@@ -135,8 +135,8 @@ function _mapExtensions(client, logger, cb) {
         if (extensionNames.length !== extensionTags.length) {
             const error = _arsenalError('Inconsistent extension list');
             logger.error('KMIP::mapExtensions',
                 { error,
                   vendorIdentification: client.vendorIdentification });
             return cb(error);
         }
         extensionNames.forEach((extensionName, idx) => {
@@ -160,7 +160,7 @@ function _queryServerInformation(client, logger, cb) {
         if (err) {
             const error = _arsenalError(err);
             logger.warn('KMIP::queryServerInformation',
                 { error });
             /* no error returned, caller can keep going */
             return cb();
         }
@@ -170,9 +170,9 @@ function _queryServerInformation(client, logger, cb) {
             JSON.stringify(response.lookup(searchFilter.serverInformation)[0]));
         logger.info('KMIP Server identified',
             { vendorIdentification: client.vendorIdentification,
               serverInformation: client.serverInformation,
               negotiatedProtocolVersion: client.kmip.protocolVersion });
         return cb();
     });
 }
@@ -196,8 +196,8 @@ function _queryOperationsAndObjects(client, logger, cb) {
         if (err) {
             const error = _arsenalError(err);
             logger.error('KMIP::queryOperationsAndObjects',
                 { error,
                   vendorIdentification: client.vendorIdentification });
             return cb(error);
         }
         const supportedOperations = response.lookup(searchFilter.operation);
@@ -222,15 +222,15 @@ function _queryOperationsAndObjects(client, logger, cb) {
             logger.warn('KMIP::queryOperationsAndObjects: ' +
                 'The KMIP Server announces that it ' +
                 'does not support all of the required features',
                 { vendorIdentification: client.vendorIdentification,
                   serverInformation: client.serverInformation,
                   supportsEncrypt, supportsDecrypt,
                   supportsActivate, supportsRevoke,
                   supportsCreate, supportsDestroy,
                   supportsQuery, supportsSymmetricKeys });
         } else {
             logger.info('KMIP Server provides the necessary feature set',
                 { vendorIdentification: client.vendorIdentification });
         }
         return cb();
     });
@@ -264,8 +264,8 @@ class Client {
         this.vendorIdentification = '';
         this.serverInformation = [];
         this.kmip = new KMIP(CodecClass || TTLVCodec,
             TransportClass || TlsTransport,
             options);
         this.kmip.registerHandshakeFunction((logger, cb) => {
             this._kmipHandshake(logger, cb);
         });
@@ -322,8 +322,8 @@ class Client {
         if (err) {
             const error = _arsenalError(err);
             logger.error('KMIP::_activateBucketKey',
                 { error,
                   serverInformation: this.serverInformation });
             return cb(error);
         }
         const uniqueIdentifier =
@@ -332,7 +332,7 @@ class Client {
             const error = _arsenalError(
                 'Server did not return the expected identifier');
             logger.error('KMIP::cipherDataKey',
                 { error, uniqueIdentifier });
             return cb(error);
         }
         return cb(null, keyIdentifier);
@@ -351,20 +351,20 @@ class Client {
         const attributes = [];
         if (!!this.options.bucketNameAttributeName) {
             attributes.push(KMIP.Attribute('TextString',
                 this.options.bucketNameAttributeName,
                 bucketName));
         }
         attributes.push(...[
             KMIP.Attribute('Enumeration', 'Cryptographic Algorithm',
                 CRYPTOGRAPHIC_ALGORITHM),
             KMIP.Attribute('Integer', 'Cryptographic Length',
                 CRYPTOGRAPHIC_LENGTH),
             KMIP.Attribute('Integer', 'Cryptographic Usage Mask',
                 this.kmip.encodeMask('Cryptographic Usage Mask',
                     CRYPTOGRAPHIC_USAGE_MASK))]);
         if (this.options.compoundCreateActivate) {
             attributes.push(KMIP.Attribute('Date-Time', 'Activation Date',
                 new Date(Date.UTC())));
         }
         return this.kmip.request(logger, 'Create', [
@@ -374,8 +374,8 @@ class Client {
         if (err) {
             const error = _arsenalError(err);
             logger.error('KMIP::createBucketKey',
                 { error,
                   serverInformation: this.serverInformation });
             return cb(error);
         }
         const createdObjectType =
@@ -386,7 +386,7 @@ class Client {
             const error = _arsenalError(
                 'Server created an object of wrong type');
             logger.error('KMIP::createBucketKey',
                 { error, createdObjectType });
             return cb(error);
         }
         if (!this.options.compoundCreateActivate) {
@@ -411,16 +411,16 @@ class Client {
             KMIP.TextString('Unique Identifier', bucketKeyId),
             KMIP.Structure('Revocation Reason', [
                 KMIP.Enumeration('Revocation Reason Code',
                     'Cessation of Operation'),
                 KMIP.TextString('Revocation Message',
                     'About to be deleted'),
             ]),
         ], (err, response) => {
             if (err) {
                 const error = _arsenalError(err);
                 logger.error('KMIP::_revokeBucketKey',
                     { error,
                       serverInformation: this.serverInformation });
                 return cb(error);
             }
             const uniqueIdentifier =
@@ -429,7 +429,7 @@ class Client {
                 const error = _arsenalError(
                     'Server did not return the expected identifier');
                 logger.error('KMIP::_revokeBucketKey',
                     { error, uniqueIdentifier });
                 return cb(error);
             }
             return cb();
@@ -448,8 +448,8 @@ class Client {
             if (err) {
                 const error = _arsenalError(err);
                 logger.error('KMIP::destroyBucketKey: revocation failed',
                     { error,
                       serverInformation: this.serverInformation });
                 return cb(error);
             }
             return this.kmip.request(logger, 'Destroy', [
@@ -458,8 +458,8 @@ class Client {
                 if (err) {
                     const error = _arsenalError(err);
                     logger.error('KMIP::destroyBucketKey',
                         { error,
                           serverInformation: this.serverInformation });
                     return cb(error);
                 }
                 const uniqueIdentifier =
@@ -468,7 +468,7 @@ class Client {
                     const error = _arsenalError(
                         'Server did not return the expected identifier');
                     logger.error('KMIP::destroyBucketKey',
                         { error, uniqueIdentifier });
                     return cb(error);
                 }
                 return cb();
@@ -487,19 +487,19 @@ class Client {
      * @callback called with (err, cipheredDataKey: Buffer)
      */
     cipherDataKey(cryptoScheme,
         masterKeyId,
         plainTextDataKey,
         logger,
         cb) {
         return this.kmip.request(logger, 'Encrypt', [
             KMIP.TextString('Unique Identifier', masterKeyId),
             KMIP.Structure('Cryptographic Parameters', [
                 KMIP.Enumeration('Block Cipher Mode',
                     CRYPTOGRAPHIC_CIPHER_MODE),
                 KMIP.Enumeration('Padding Method',
                     CRYPTOGRAPHIC_PADDING_METHOD),
                 KMIP.Enumeration('Cryptographic Algorithm',
                     CRYPTOGRAPHIC_ALGORITHM),
             ]),
             KMIP.ByteString('Data', plainTextDataKey),
             KMIP.ByteString('IV/Counter/Nonce', CRYPTOGRAPHIC_DEFAULT_IV),
@@ -507,8 +507,8 @@ class Client {
             if (err) {
                 const error = _arsenalError(err);
                 logger.error('KMIP::cipherDataKey',
                     { error,
                       serverInformation: this.serverInformation });
                 return cb(error);
             }
             const uniqueIdentifier =
@@ -518,7 +518,7 @@ class Client {
                 const error = _arsenalError(
                     'Server did not return the expected identifier');
                 logger.error('KMIP::cipherDataKey',
                     { error, uniqueIdentifier });
                 return cb(error);
             }
             return cb(null, data);
@@ -536,19 +536,19 @@ class Client {
      * @callback called with (err, plainTextDataKey: Buffer)
      */
     decipherDataKey(cryptoScheme,
         masterKeyId,
         cipheredDataKey,
         logger,
         cb) {
         return this.kmip.request(logger, 'Decrypt', [
             KMIP.TextString('Unique Identifier', masterKeyId),
             KMIP.Structure('Cryptographic Parameters', [
                 KMIP.Enumeration('Block Cipher Mode',
                     CRYPTOGRAPHIC_CIPHER_MODE),
                 KMIP.Enumeration('Padding Method',
                     CRYPTOGRAPHIC_PADDING_METHOD),
                 KMIP.Enumeration('Cryptographic Algorithm',
                     CRYPTOGRAPHIC_ALGORITHM),
             ]),
             KMIP.ByteString('Data', cipheredDataKey),
             KMIP.ByteString('IV/Counter/Nonce', CRYPTOGRAPHIC_DEFAULT_IV),
@@ -556,8 +556,8 @@ class Client {
             if (err) {
                 const error = _arsenalError(err);
                 logger.error('KMIP::decipherDataKey',
                     { error,
                       serverInformation: this.serverInformation });
                 return cb(error);
             }
             const uniqueIdentifier =
@@ -567,7 +567,7 @@ class Client {
                 const error = _arsenalError(
                     'Server did not return the right identifier');
                 logger.error('KMIP::decipherDataKey',
                     { error, uniqueIdentifier });
                 return cb(error);
             }
             return cb(null, data);
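
Taken together, the client exposes a bucket-key lifecycle on top of the KMIP Create/Activate/Encrypt/Decrypt/Revoke/Destroy operations. A hedged usage sketch, with method names as shown above but argument shapes assumed, and logger/options elided:

    import * as crypto from 'crypto';

    // createBucketKey returns the KMIP unique identifier of a new master key
    client.createBucketKey('my-bucket', logger, (err, keyId) => {
        if (err) { throw err; }
        const dataKey = crypto.randomBytes(32); // per-object data key
        // wrap the data key under the master key held by the KMIP server
        client.cipherDataKey(1, keyId, dataKey, logger, (err, wrapped) => {
            // store `wrapped` alongside the object; later unwrap it with
            // decipherDataKey(1, keyId, wrapped, logger, cb), and call
            // destroyBucketKey(keyId, logger, cb) when the bucket goes away
        });
    });
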


@@ -55,15 +55,15 @@ function TTLVCodec() {
const property = {};
if (!TypeDecoder[elementType]) {
_throwError(logger,
'Unknown element type',
{ funcName, elementTag, elementType });
}
const elementValue = value.slice(i + 8,
i + 8 + elementLength);
if (elementValue.length !== elementLength) {
_throwError(logger, 'BUG: Wrong buffer size',
{ funcName, elementLength,
bufferLength: elementValue.length });
}
property.type = TypeDecoder[elementType].name;
property.value = TypeDecoder[elementType]
@@ -75,7 +75,7 @@ function TTLVCodec() {
const tagInfo = TagDecoder[elementTag];
if (!tagInfo) {
logger.debug('Unknown element tag',
{ funcName, elementTag });
property.tag = elementTag;
element['Unknown Tag'] = property;
} else {
@@ -83,8 +83,8 @@ function TTLVCodec() {
if (tagInfo.name === 'Attribute Name') {
if (property.type !== 'TextString') {
_throwError(logger,
'Invalide type',
{ funcName, type: property.type });
}
diversion = property.value;
}
@@ -114,8 +114,8 @@ function TTLVCodec() {
}
const itemResult =
TypeEncoder[itemType].encode(itemTagName,
itemValue,
itemDiversion);
encodedValue = encodedValue
.concat(_ttlvPadVector(itemResult));
});
@@ -133,9 +133,9 @@ function TTLVCodec() {
const fixedLength = 4;
if (fixedLength !== value.length) {
_throwError(logger,
'Length mismatch',
{ funcName, fixedLength,
bufferLength: value.length });
}
return value.readUInt32BE(0);
},
@@ -156,16 +156,16 @@ function TTLVCodec() {
const fixedLength = 8;
if (fixedLength !== value.length) {
_throwError(logger,
'Length mismatch',
{ funcName, fixedLength,
bufferLength: value.length });
}
const longUInt = UINT32_MAX * value.readUInt32BE(0) +
value.readUInt32BE(4);
if (longUInt > Number.MAX_SAFE_INTEGER) {
_throwError(logger,
'53-bit overflow',
{ funcName, longUInt });
}
return longUInt;
},
@@ -200,9 +200,9 @@ function TTLVCodec() {
const fixedLength = 4;
if (fixedLength !== value.length) {
_throwError(logger,
'Length mismatch',
{ funcName, fixedLength,
bufferLength: value.length });
}
const enumValue = value.toString('hex');
const actualTag = diversion ? TagEncoder[diversion].value : tag;
@@ -211,10 +211,10 @@ function TTLVCodec() {
!enumInfo.enumeration ||
!enumInfo.enumeration[enumValue]) {
return { tag,
value: enumValue,
message: 'Unknown enumeration value',
diversion,
};
}
return enumInfo.enumeration[enumValue];
},
@@ -227,7 +227,7 @@ function TTLVCodec() {
const actualTag = diversion || tagName;
const encodedValue =
Buffer.from(TagEncoder[actualTag].enumeration[value],
'hex');
return _ttlvPadVector([tag, type, length, encodedValue]);
},
},
@@ -238,9 +238,9 @@ function TTLVCodec() {
const fixedLength = 8;
if (fixedLength !== value.length) {
_throwError(logger,
'Length mismatch',
{ funcName, fixedLength,
bufferLength: value.length });
}
const msUInt = value.readUInt32BE(0);
const lsUInt = value.readUInt32BE(4);
@@ -267,7 +267,7 @@ function TTLVCodec() {
const length = Buffer.alloc(4);
length.writeUInt32BE(value.length);
return _ttlvPadVector([tag, type, length,
Buffer.from(value, 'utf8')]);
},
},
'08': {
@@ -289,17 +289,17 @@ function TTLVCodec() {
const fixedLength = 8;
if (fixedLength !== value.length) {
_throwError(logger,
'Length mismatch',
{ funcName, fixedLength,
bufferLength: value.length });
}
const d = new Date(0);
const utcSeconds = UINT32_MAX * value.readUInt32BE(0) +
value.readUInt32BE(4);
if (utcSeconds > Number.MAX_SAFE_INTEGER) {
_throwError(logger,
'53-bit overflow',
{ funcName, utcSeconds });
}
d.setUTCSeconds(utcSeconds);
return d;
@@ -323,9 +323,9 @@ function TTLVCodec() {
const fixedLength = 4;
if (fixedLength !== value.length) {
_throwError(logger,
'Length mismatch',
{ funcName, fixedLength,
bufferLength: value.length });
}
return value.readInt32BE(0);
},
@@ -415,8 +415,8 @@ function TTLVCodec() {
throw Error(`Unknown Type '${type}'`);
}
const itemValue = TypeEncoder[type].encode(key,
item[key].value,
item[key].diversion);
result = result.concat(_ttlvPadVector(itemValue));
});
});
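The LongInteger and DateTime decoders above share one pattern: a 64-bit big-endian value is read as two 32-bit halves and recombined, with a guard against numbers JavaScript cannot represent exactly. A standalone sketch of that pattern (not the codec's actual export; for the arithmetic to hold, the codec's UINT32_MAX constant is assumed to hold 2^32, despite its name):

const TWO_POW_32 = 2 ** 32;

function readUInt64BE53(buf) {
    const hi = buf.readUInt32BE(0);
    const lo = buf.readUInt32BE(4);
    const n = TWO_POW_32 * hi + lo;
    // JavaScript numbers are IEEE 754 doubles: integers above 2^53 - 1
    // lose precision, hence the decoders' '53-bit overflow' guard.
    if (n > Number.MAX_SAFE_INTEGER) {
        throw new Error('53-bit overflow');
    }
    return n;
}

// Example: 0x00000001_00000000 === 4294967296
readUInt64BE53(Buffer.from([0, 0, 0, 1, 0, 0, 0, 0]));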
View File
@@ -275,11 +275,11 @@ class KMIP {
KMIP.Structure('Request Header', [
KMIP.Structure('Protocol Version', [
KMIP.Integer('Protocol Version Major',
this.protocolVersion.major),
KMIP.Integer('Protocol Version Minor',
this.protocolVersion.minor)]),
KMIP.Integer('Maximum Response Size',
this.maximumResponseSize),
KMIP.Integer('Batch Count', 1)]),
KMIP.Structure('Batch Item', [
KMIP.Enumeration('Operation', operation),
@@ -292,7 +292,7 @@ class KMIP {
(err, conversation, rawResponse) => {
if (err) {
logger.error('KMIP::request: Failed to send message',
{ error: err });
return cb(err);
}
const response = this._decodeMessage(logger, rawResponse);
@@ -311,16 +311,16 @@ class KMIP {
this.transport.abortPipeline(conversation);
const error = Error('Invalid batch item ID returned');
logger.error('KMIP::request: failed',
{ resultUniqueBatchItemID, uuid, error });
return cb(error);
}
if (performedOperation !== operation) {
this.transport.abortPipeline(conversation);
const error = Error('Operation mismatch',
{ got: performedOperation,
expected: operation });
logger.error('KMIP::request: Operation mismatch',
{ error });
return cb(error);
}
if (resultStatus !== 'Success') {
@@ -331,19 +331,17 @@ class KMIP {
response.lookup(
'Response Message/Batch Item/Result Message')[0];
const error = Error('KMIP request failure',
{ resultStatus,
resultReason,
resultMessage });
logger.error('KMIP::request: request failed',
{ error, resultStatus,
resultReason, resultMessage });
return cb(error);
}
return cb(null, response);
});
}
}
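Every outgoing call funnels through this single request path: the operation payload is wrapped in a Request Message whose header carries the protocol version, maximum response size, and batch count, and the response is checked for batch item ID, operation, and result status before the callback fires. A sketch of just the header envelope using the same static helpers shown above, with illustrative values in place of the instance configuration:

// Illustrative values only; the real ones come from this.protocolVersion
// and this.maximumResponseSize.
const requestHeader = KMIP.Structure('Request Header', [
    KMIP.Structure('Protocol Version', [
        KMIP.Integer('Protocol Version Major', 1),
        KMIP.Integer('Protocol Version Minor', 4),
    ]),
    KMIP.Integer('Maximum Response Size', 8000),
    KMIP.Integer('Batch Count', 1),
]);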
View File
@@ -86,8 +86,8 @@ class TransportTemplate {
const deferedRequest = this.deferedRequests.shift();
process.nextTick(() => {
this.send(logger,
deferedRequest.encodedMessage,
deferedRequest.cb);
});
} else if (this.callbackPipeline.length === 0 &&
this.deferedRequests.length === 0 &&
View File
@@ -26,7 +26,7 @@ function sendError(res, log, error, optMessage) {
httpCode: error.code,
errorType: error.message,
error: message,
-}
+},
);
res.writeHead(error.code);
res.end(JSON.stringify({
View File
@@ -19,7 +19,7 @@ function setContentRange(response, byteRange, objectSize) {
const [start, end] = byteRange;
assert(start !== undefined && end !== undefined);
response.setHeader('Content-Range',
`bytes ${start}-${end}/${objectSize}`);
}
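The helper emits the standard HTTP Content-Range header: an inclusive byte range followed by the total object size. A worked example of the output:

// byteRange [0, 499] on a 1234-byte object:
setContentRange(response, [0, 499], 1234);
// => Content-Range: bytes 0-499/1234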
function sendError(res, log, error, optMessage) {
@@ -45,7 +45,6 @@ function sendError(res, log, error, optMessage) {
* start() to start listening to the configured port.
*/
class RESTServer extends httpServer {
/**
* @constructor
* @param {Object} params - constructor params
@@ -227,7 +227,7 @@ class RESTServer extends httpServer {
return sendError(res, log, err);
}
log.debug('sending back 200/206 response with contents',
{ key: pathInfo.key });
setContentLength(res, contentLength);
res.setHeader('Accept-Ranges', 'bytes');
if (byteRange) {
@@ -265,7 +265,7 @@ class RESTServer extends httpServer {
return sendError(res, log, err);
}
log.debug('sending back 204 response to DELETE',
{ key: pathInfo.key });
res.writeHead(204);
return res.end(() => {
log.debug('DELETE response sent', { key: pathInfo.key });
View File
@@ -19,7 +19,7 @@ function explodePath(path) {
return {
service: pathMatch[1],
key: (pathMatch[3] !== undefined && pathMatch[3].length > 0 ?
pathMatch[3] : undefined),
};
}
throw errors.InvalidURI.customizeDescription('malformed URI');
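Based on the capture groups used above (the match regex itself sits outside this hunk), the function splits a REST path into a service segment and an optional key. The shapes below are assumptions for illustration:

explodePath('/someService/someKey'); // => { service: 'someService', key: 'someKey' }
explodePath('/someService/');        // => { service: 'someService', key: undefined }
explodePath('not-a-valid-path');     // throws errors.InvalidURI ('malformed URI')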
View File
@@ -17,7 +17,6 @@ const rpc = require('./rpc.js');
* RPC client object accessing the sub-level transparently.
*/
class LevelDbClient extends rpc.BaseClient {
/**
* @constructor
*
@@ -78,7 +77,6 @@ class LevelDbClient extends rpc.BaseClient {
* env.subDb (env is passed as first parameter of received RPC calls).
*/
class LevelDbService extends rpc.BaseService {
/**
* @constructor
*
View File
@@ -37,7 +37,6 @@ let streamRPCJSONObj;
* an error occurred).
*/
class BaseClient extends EventEmitter {
/**
* @constructor
*
@@ -54,7 +53,7 @@ class BaseClient extends EventEmitter {
*/
constructor(params) {
const { url, logger, callTimeoutMs,
streamMaxPendingAck, streamAckTimeoutMs } = params;
assert(url);
assert(logger);
@@ -82,11 +81,11 @@ class BaseClient extends EventEmitter {
_call(remoteCall, args, cb) {
const wrapCb = (err, data) => {
cb(reconstructError(err),
this.socketStreams.decodeStreams(data));
};
this.logger.debug('remote call', { remoteCall, args });
this.socket.emit('call', remoteCall,
this.socketStreams.encodeStreams(args), wrapCb);
return undefined;
}
@@ -113,8 +112,8 @@ class BaseClient extends EventEmitter {
throw new Error(`argument cb=${cb} is not a callback`);
}
async.timeout(this._call.bind(this), timeoutMs,
`operation ${remoteCall} timed out`)(remoteCall,
args, cb);
return undefined;
}
@@ -142,7 +141,7 @@ class BaseClient extends EventEmitter {
const url = this.url;
this.socket.on('error', err => {
this.logger.warn('connectivity error to the RPC service',
{ url, error: err });
});
this.socket.on('connect', () => {
this.emit('connect');
@@ -156,7 +155,7 @@ class BaseClient extends EventEmitter {
this.getManifest((err, manifest) => {
if (err) {
this.logger.error('Error fetching manifest from RPC server',
{ error: err });
} else {
manifest.api.forEach(apiItem => {
this.createCall(apiItem.name);
@@ -251,7 +250,6 @@ class BaseClient extends EventEmitter {
*
*/
class BaseService {
/**
* @constructor
*
@@ -497,7 +495,7 @@ function RPCServer(params) {
conn.on('error', err => {
log.error('error on socket.io connection',
{ namespace: service.namespace, error: err });
});
conn.on('call', (remoteCall, args, cb) => {
const decodedArgs = streamsSocket.decodeStreams(args);
@@ -647,8 +645,8 @@ streamRPCJSONObj = function _streamRPCJSONObj(obj, wstream, cb) {
// primitive types
if (obj === undefined) {
wstream.write('null'); // if undefined elements are present in
// arrays, convert them to JSON null
// objects
} else {
wstream.write(JSON.stringify(obj));
}
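The 'null' written for undefined above keeps the streamed output consistent with what a non-streamed JSON.stringify would have produced for the same array. A quick check in Node:

// undefined inside an array serializes to null...
JSON.stringify([1, undefined, 2]); // => '[1,null,2]'
// ...while a bare undefined produces no JSON at all, which is why the
// streaming writer must substitute the literal string 'null' itself.
JSON.stringify(undefined);         // => undefined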
View File
@@ -16,7 +16,7 @@ class SIOOutputStream extends stream.Writable {
constructor(socket, streamId, maxPendingAck, ackTimeoutMs) {
super({ objectMode: true });
this._initOutputStream(socket, streamId, maxPendingAck,
ackTimeoutMs);
}
_initOutputStream(socket, streamId, maxPendingAck, ackTimeoutMs) {
@@ -194,7 +194,7 @@ class SIOStreamSocket {
this.socket.on('stream-data', (payload, cb) => {
const { streamId, data } = payload;
log.debug('received \'stream-data\' event',
{ streamId, size: data.length });
const stream = this.remoteStreams[streamId];
if (!stream) {
log.debug('no such remote stream registered', { streamId });
@@ -280,15 +280,15 @@ class SIOStreamSocket {
let transportStream;
if (isReadStream) {
transportStream = new SIOOutputStream(this, streamId,
this.maxPendingAck,
this.ackTimeoutMs);
} else {
transportStream = new SIOInputStream(this, streamId);
}
this.localStreams[streamId] = arg;
arg.once('close', () => {
log.debug('stream closed, removing from local streams',
{ streamId });
delete this.localStreams[streamId];
});
arg.on('error', error => {
@@ -350,8 +350,8 @@ class SIOStreamSocket {
stream = new SIOInputStream(this, streamId);
} else if (arg.writable) {
stream = new SIOOutputStream(this, streamId,
this.maxPendingAck,
this.ackTimeoutMs);
} else {
throw new Error('can\'t decode stream neither readable ' +
'nor writable');
@@ -360,14 +360,14 @@ class SIOStreamSocket {
if (arg.readable) {
stream.once('close', () => {
log.debug('stream closed, removing from remote streams',
{ streamId });
delete this.remoteStreams[streamId];
});
}
if (arg.writable) {
stream.once('finish', () => {
log.debug('stream finished, removing from remote streams',
{ streamId });
delete this.remoteStreams[streamId];
});
}
@@ -399,7 +399,7 @@ class SIOStreamSocket {
_write(streamId, data, cb) {
this.logger.debug('emit \'stream-data\' event',
{ streamId, size: data.length });
this.socket.emit('stream-data', { streamId, data }, cb);
}
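Both directions of the transport ride on the single 'stream-data' socket.io event shown above: the sender emits `{ streamId, data }` with an acknowledgement callback, and the receiver routes the payload to the stream registered under that id. A purely illustrative emit, with hypothetical names:

socket.emit('stream-data', { streamId: 'stream-1', data: chunk }, err => {
    // acknowledgement from the peer; the pending-ack count and timeout
    // (maxPendingAck, ackTimeoutMs above) bound how far the writer can
    // run ahead of these callbacks
});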
View File
@@ -1,9 +1,10 @@
'use strict'; // eslint-disable-line strict
-const Ajv = require('ajv');
-const userPolicySchema = require('./userPolicySchema');
-const resourcePolicySchema = require('./resourcePolicySchema');
-const errors = require('../errors');
+import Ajv from 'ajv';
+import * as userPolicySchema from './userPolicySchema.json';
+import * as resourcePolicySchema from './resourcePolicySchema.json';
+import errors from '../errors';
+import type { ArsenalError } from '../errors';
const ajValidate = new Ajv({ allErrors: true });
ajValidate.addMetaSchema(require('ajv/lib/refs/json-schema-draft-06.json'));
@@ -27,7 +28,7 @@ const errDict = {
};
// parse ajv errors and return early with the first relevant error
-function _parseErrors(ajvErrors, policyType) {
+function _parseErrors(ajvErrors: Ajv.ErrorObject[], policyType: string) {
let parsedErr;
if (policyType === 'user') {
// deep copy is needed as we have to assign custom error description
@@ -67,7 +68,7 @@ function _parseErrors(ajvErrors, policyType) {
}
// parse JSON safely without throwing an exception
-function _safeJSONParse(s) {
+function _safeJSONParse(s: string): object {
try {
return JSON.parse(s);
} catch (e) {
@@ -75,9 +76,20 @@ function _safeJSONParse(s) {
}
}
+/**
+ * @typedef ValidationResult
+ * @type Object
+ * @property {Array|null} error - list of validation errors or null
+ * @property {Bool} valid - true/false depending on the validation result
+ */
+interface ValidationResult {
+error: ArsenalError;
+valid: boolean;
+}
// validates policy using the validation schema
-function _validatePolicy(type, policy) {
-if (type === 'user') {
+function _validatePolicy(policyType: string, policy: string): ValidationResult {
+if (policyType === 'user') {
const parseRes = _safeJSONParse(policy);
if (parseRes instanceof Error) {
return { error: Object.assign({}, errors.MalformedPolicyDocument),
@@ -90,7 +102,7 @@ function _validatePolicy(type, policy) {
}
return { error: null, valid: true };
}
-if (type === 'resource') {
+if (policyType === 'resource') {
const parseRes = _safeJSONParse(policy);
if (parseRes instanceof Error) {
return { error: Object.assign({}, errors.MalformedPolicy),
@@ -105,19 +117,14 @@ function _validatePolicy(type, policy) {
}
return { error: errors.NotImplemented, valid: false };
}
-/**
- * @typedef ValidationResult
- * @type Object
- * @property {Array|null} error - list of validation errors or null
- * @property {Bool} valid - true/false depending on the validation result
- */
/**
* Validates user policy
* @param {String} policy - policy json
* @returns {Object} - returns object with properties error and value
* @returns {ValidationResult} - result of the validation
*/
-function validateUserPolicy(policy) {
+function validateUserPolicy(policy: string): ValidationResult {
return _validatePolicy('user', policy);
}
@@ -127,11 +134,11 @@ function validateUserPolicy(policy) {
* @returns {Object} - returns object with properties error and value
* @returns {ValidationResult} - result of the validation
*/
-function validateResourcePolicy(policy) {
+function validateResourcePolicy(policy: string): ValidationResult {
return _validatePolicy('resource', policy);
}
-module.exports = {
+export {
validateUserPolicy,
validateResourcePolicy,
};
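A usage sketch for the migrated exports; the require path follows the package's index (assumed here), and the inline policy is illustrative:

const { validateUserPolicy } = require('arsenal').policies;

const result = validateUserPolicy(JSON.stringify({
    Version: '2012-10-17',
    Statement: [{ Effect: 'Allow', Action: 's3:GetObject', Resource: '*' }],
}));
// result has the ValidationResult shape defined above:
// { error: null, valid: true } when the document validates,
// { error: <ArsenalError>, valid: false } otherwise.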
View File
@@ -50,7 +50,7 @@ evaluators.isResourceApplicable = (requestContext, statementResource, log) => {
requestResourceArr, true);
if (arnSegmentsMatch) {
log.trace('policy resource is applicable to request',
{ requestResource: resource, policyResource });
return true;
}
continue;
@@ -224,21 +224,21 @@ evaluators.evaluatePolicy = (requestContext, policy, log) => {
// in policy, move on to next statement
if (currentStatement.NotResource &&
evaluators.isResourceApplicable(requestContext,
currentStatement.NotResource, log)) {
continue;
}
// If affirmative action is in policy and request action is not
// applicable, move on to next statement
if (currentStatement.Action &&
!evaluators.isActionApplicable(requestContext.getAction(),
currentStatement.Action, log)) {
continue;
}
// If NotAction is in policy and action matches NotAction in policy,
// move on to next statement
if (currentStatement.NotAction &&
evaluators.isActionApplicable(requestContext.getAction(),
currentStatement.NotAction, log)) {
continue;
}
const conditionEval = currentStatement.Condition ?
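The three guards above decide whether a statement applies at all before its Effect is considered. As an illustration of the NotAction guard (hypothetical statement, standard IAM semantics), the statement below is skipped for an s3:DeleteObject request because the request action matches its NotAction:

const statement = {
    Effect: 'Allow',
    NotAction: ['s3:DeleteObject'], // matches => evaluator continues
    Resource: ['arn:aws:s3:::example-bucket/*'],
};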
View File
@@ -39,11 +39,11 @@ conditions.findConditionKey = (key, requestContext) => {
// (see Boolean Condition Operators).
// Note: This key is only present if MFA was used. So, the following
// will not work:
// "Condition" :
// { "Bool" : { "aws:MultiFactorAuthPresent" : false } }
// Instead use:
// "Condition" :
// { "Null" : { "aws:MultiFactorAuthPresent" : true } }
map.set('aws:MultiFactorAuthPresent',
requestContext.getMultiFactorAuthPresent());
// aws:MultiFactorAuthAge Used to check how many seconds since
@@ -166,8 +166,8 @@ conditions.findConditionKey = (key, requestContext) => {
// so evaluation should be skipped
map.set('s3:RequestObjectTagKeys',
requestContext.getNeedTagEval() && requestContext.getRequestObjTags()
? getTagKeys(requestContext.getRequestObjTags())
: undefined);
return map.get(key);
};
@@ -191,7 +191,7 @@ function convertSpecialChars(string) {
return map[char];
}
return string.replace(/(\$\{\*\})|(\$\{\?\})|(\$\{\$\})/g,
characterMap);
}
/**
@@ -425,10 +425,10 @@ conditions.convertConditionOperator = operator => {
return !operatorMap.ArnLike(key, value);
},
Null: function nullOperator(key, value) {
// Null is used to check if a condition key is present.
// The policy statement value should be either true (the key doesn't
// exist — it is null) or false (the key exists and its value is
// not null).
if ((key === undefined || key === null)
&& value[0] === 'true' ||
(key !== undefined && key !== null)
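A worked condition block for the Null operator described above (hypothetical policy fragments, matching the MFA note earlier in this file): "false" asserts that the key exists, "true" asserts that it is absent.

// Applies only when the request was MFA-authenticated
// (the key exists, i.e. it is NOT null):
const requireMfa = { Null: { 'aws:MultiFactorAuthPresent': 'false' } };

// Applies only when MFA was not used at all (the key is absent):
const requireNoMfa = { Null: { 'aws:MultiFactorAuthPresent': 'true' } };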
View File
@@ -51,10 +51,10 @@ wildcards.handleWildcardInResource = arn => {
// Wildcards can be part of the resource ARN.
// Wildcards do NOT span segments of the ARN (separated by ":")
// Example: all elements in specific bucket:
// "Resource": "arn:aws:s3:::my_corporate_bucket/*"
// ARN format:
// arn:partition:service:region:namespace:relative-id
const arnArr = arn.split(':');
return arnArr.map(portion => wildcards.handleWildcards(portion));
};
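What the split produces for the comment's own example (the per-segment wildcard expansion lives in handleWildcards, outside this hunk):

'arn:aws:s3:::my_corporate_bucket/*'.split(':');
// => ['arn', 'aws', 's3', '', '', 'my_corporate_bucket/*']
// Six segments; the '*' stays inside the relative-id segment, which is
// why a wildcard can never match across ':' boundaries.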
View File
@@ -6,7 +6,6 @@ const crypto = require('crypto');
* data through a stream
*/
class MD5Sum extends Transform {
/**
* @constructor
*/
@@ -40,7 +39,6 @@ class MD5Sum extends Transform {
this.emit('hashed');
callback(null);
}
}
module.exports = MD5Sum;
View File
@@ -73,7 +73,7 @@ class ResultsCollector extends EventEmitter {
* @property {Error} [results[].error] - error returned by Azure putting subpart
* @property {number} results[].subPartIndex - index of the subpart
*/
/**
* "error" event
* @event ResultCollector#error
* @type {(Error|undefined)} error - error returned by Azure last subpart
View File
@@ -94,7 +94,7 @@ azureMpuUtils.getSubPartIds = (part, uploadId) =>
azureMpuUtils.getBlockId(uploadId, part.partNumber, subPartIndex));
azureMpuUtils.putSinglePart = (errorWrapperFn, request, params, dataStoreName,
log, cb) => {
const { bucketName, partNumber, size, objectKey, contentMD5, uploadId }
= params;
const blockId = azureMpuUtils.getBlockId(uploadId, partNumber, 0);
@@ -107,31 +107,31 @@ log, cb) => {
request.pipe(passThrough);
return errorWrapperFn('uploadPart', 'createBlockFromStream',
[blockId, bucketName, objectKey, passThrough, size, options,
(err, result) => {
if (err) {
log.error('Error from Azure data backend uploadPart',
{ error: err.message, dataStoreName });
if (err.code === 'ContainerNotFound') {
return cb(errors.NoSuchBucket);
}
if (err.code === 'InvalidMd5') {
return cb(errors.InvalidDigest);
}
if (err.code === 'Md5Mismatch') {
return cb(errors.BadDigest);
}
return cb(errors.InternalError.customizeDescription(
`Error returned from Azure: ${err.message}`),
);
}
const md5 = result.headers['content-md5'] || '';
const eTag = objectUtils.getHexMD5(md5);
return cb(null, eTag, size);
}], log, cb);
};
azureMpuUtils.putNextSubPart = (errorWrapperFn, partParams, subPartInfo,
subPartStream, subPartIndex, resultsCollector, log, cb) => {
const { uploadId, partNumber, bucketName, objectKey } = partParams;
const subPartSize = azureMpuUtils.getSubPartSize(
subPartInfo, subPartIndex);
@@ -140,11 +140,11 @@ subPartStream, subPartIndex, resultsCollector, log, cb) => {
resultsCollector.pushOp();
errorWrapperFn('uploadPart', 'createBlockFromStream',
[subPartId, bucketName, objectKey, subPartStream, subPartSize,
{}, err => resultsCollector.pushResult(err, subPartIndex)], log, cb);
};
azureMpuUtils.putSubParts = (errorWrapperFn, request, params,
dataStoreName, log, cb) => {
const subPartInfo = azureMpuUtils.getSubPartInfo(params.size);
const resultsCollector = new ResultsCollector();
const hashedStream = new MD5Sum();
View File
@@ -31,9 +31,9 @@ convertMethods.listMultipartUploads = xmlParams => {
const l = xmlParams.list;
xml.push('<?xml version="1.0" encoding="UTF-8"?>',
'<ListMultipartUploadsResult ' +
'xmlns="http://s3.amazonaws.com/doc/2006-03-01/">',
-`<Bucket>${escapeForXml(xmlParams.bucketName)}</Bucket>`
+`<Bucket>${escapeForXml(xmlParams.bucketName)}</Bucket>`,
);
// For certain XML elements, if it is `undefined`, AWS returns either an
@@ -58,7 +58,7 @@ convertMethods.listMultipartUploads = xmlParams => {
});
xml.push(`<MaxUploads>${escapeForXml(l.MaxKeys)}</MaxUploads>`,
-`<IsTruncated>${escapeForXml(l.IsTruncated)}</IsTruncated>`
+`<IsTruncated>${escapeForXml(l.IsTruncated)}</IsTruncated>`,
);
l.Uploads.forEach(upload => {
@@ -69,29 +69,29 @@ convertMethods.listMultipartUploads = xmlParams => {
}
xml.push('<Upload>',
`<Key>${escapeForXml(key)}</Key>`,
`<UploadId>${escapeForXml(val.UploadId)}</UploadId>`,
'<Initiator>',
`<ID>${escapeForXml(val.Initiator.ID)}</ID>`,
`<DisplayName>${escapeForXml(val.Initiator.DisplayName)}` +
'</DisplayName>',
'</Initiator>',
'<Owner>',
`<ID>${escapeForXml(val.Owner.ID)}</ID>`,
`<DisplayName>${escapeForXml(val.Owner.DisplayName)}` +
'</DisplayName>',
'</Owner>',
`<StorageClass>${escapeForXml(val.StorageClass)}` +
'</StorageClass>',
`<Initiated>${escapeForXml(val.Initiated)}</Initiated>`,
-'</Upload>'
+'</Upload>',
);
});
l.CommonPrefixes.forEach(prefix => {
xml.push('<CommonPrefixes>',
`<Prefix>${escapeForXml(prefix)}</Prefix>`,
-'</CommonPrefixes>'
+'</CommonPrefixes>',
);
});
View File
@@ -5,7 +5,6 @@ const Readable = require('stream').Readable;
* This class is used to produce zeros filled buffers for a reader consumption
*/
class NullStream extends Readable {
/**
* Construct a new zeros filled buffers producer that will
* produce as much bytes as specified by the range parameter, or the size
@@ -32,8 +31,8 @@ class NullStream extends Readable {
_read(size) {
const toRead = Math.min(size, this.bytesToRead);
const buffer = toRead > 0
? Buffer.alloc(toRead, 0)
: null;
this.bytesToRead -= toRead;
this.push(buffer);
}
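The _read contract above also terminates the stream: once bytesToRead is exhausted, toRead is 0 and push(null) signals end-of-stream. A consumption sketch, assuming a size-based constructor as the doc comment describes:

const zeros = new NullStream(16); // constructor shape assumed
zeros.on('data', chunk =>
    console.log(chunk.length, chunk.every(byte => byte === 0))); // 16 true
zeros.on('end', () => console.log('done'));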
View File
@@ -110,7 +110,7 @@ function generateMpuPartStorageInfo(filteredPartList) {
* and extraPartLocations
*/
function validateAndFilterMpuParts(storedParts, jsonList, mpuOverviewKey,
splitter, log) {
let storedPartsCopy = [];
const filteredPartsObj = {};
filteredPartsObj.partList = [];
View File
@@ -4,11 +4,11 @@ const errors = require('../errors');
const escapeForXml = require('./escapeForXml');
const errorInvalidArgument = errors.InvalidArgument
.customizeDescription('The header \'x-amz-tagging\' shall be ' +
'encoded as UTF-8 then URLEncoded URL query parameters without ' +
'tag name duplicates.');
const errorBadRequestLimit50 = errors.BadRequest
.customizeDescription('Object tags cannot be greater than 50');
/*
Format of xml request:
@@ -38,7 +38,7 @@ const _validator = {
result.Tagging.TagSet &&
result.Tagging.TagSet.length === 1 &&
(
result.Tagging.TagSet[0] === '' ||
result.Tagging.TagSet[0] &&
Object.keys(result.Tagging.TagSet[0]).length === 1 &&
result.Tagging.TagSet[0].Tag &&
@@ -155,7 +155,7 @@ function parseTagXml(xml, log, cb) {
function convertToXml(objectTags) {
const xml = [];
xml.push('<?xml version="1.0" encoding="UTF-8" standalone="yes"?>',
'<Tagging> <TagSet>');
if (objectTags && Object.keys(objectTags).length > 0) {
Object.keys(objectTags).forEach(key => {
xml.push(`<Tag><Key>${escapeForXml(key)}</Key>` +
View File
@@ -42,7 +42,7 @@ function checkBucketAndKey(bucketName, objectKey, method, reqQuery,
log.debug('empty bucket name', { method: 'routes' });
return (method !== 'OPTIONS') ?
errors.MethodNotAllowed : errors.AccessForbidden
.customizeDescription('CORSResponse: Bucket not found');
}
if (bucketName !== undefined && routesUtils.isValidBucketName(bucketName,
blacklistedPrefixes.bucket) === false) {
@@ -90,7 +90,7 @@ function checkTypes(req, res, params, logger) {
'bad routes param: internalHandlers must be an object');
if (params.statsClient) {
assert.strictEqual(typeof params.statsClient, 'object',
'bad routes param: statsClient must be an object');
}
assert(Array.isArray(params.allEndpoints),
'bad routes param: allEndpoints must be an array');
@@ -98,13 +98,13 @@ function checkTypes(req, res, params, logger) {
'bad routes param: allEndpoints must have at least one endpoint');
params.allEndpoints.forEach(endpoint => {
assert.strictEqual(typeof endpoint, 'string',
'bad routes param: each item in allEndpoints must be a string');
});
assert(Array.isArray(params.websiteEndpoints),
'bad routes param: allEndpoints must be an array');
params.websiteEndpoints.forEach(endpoint => {
assert.strictEqual(typeof endpoint, 'string',
'bad routes param: each item in websiteEndpoints must be a string');
});
assert.strictEqual(typeof params.blacklistedPrefixes, 'object',
'bad routes param: blacklistedPrefixes must be an object');
@@ -112,13 +112,13 @@ function checkTypes(req, res, params, logger) {
'bad routes param: blacklistedPrefixes.bucket must be an array');
params.blacklistedPrefixes.bucket.forEach(pre => {
assert.strictEqual(typeof pre, 'string',
'bad routes param: each blacklisted bucket prefix must be a string');
});
assert(Array.isArray(params.blacklistedPrefixes.object),
'bad routes param: blacklistedPrefixes.object must be an array');
params.blacklistedPrefixes.object.forEach(pre => {
assert.strictEqual(typeof pre, 'string',
'bad routes param: each blacklisted object prefix must be a string');
});
assert.strictEqual(typeof params.dataRetrievalParams, 'object',
'bad routes param: dataRetrievalParams must be a defined object');
@@ -173,8 +173,8 @@ function routes(req, res, params, logger) {
reqUids = undefined;
}
const log = (reqUids !== undefined ?
logger.newRequestLoggerFromSerializedUids(reqUids) :
logger.newRequestLogger());
if (!req.url.startsWith('/_/healthcheck') &&
!req.url.startsWith('/_/report')) {
@@ -210,7 +210,7 @@ function routes(req, res, params, logger) {
return routesUtils.responseXMLBody(
errors.InvalidURI.customizeDescription('Could not parse the ' +
'specified URI. Check your restEndpoints configuration.'),
undefined, res, log);
}
log.addDefaultFields({
@@ -232,7 +232,7 @@ function routes(req, res, params, logger) {
if (bucketOrKeyError) {
log.trace('error with bucket or key value',
{ error: bucketOrKeyError });
return routesUtils.responseXMLBody(bucketOrKeyError, null, res, log);
}
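Taken together, the assertions in checkTypes pin down the shape of the `params` object that routes() expects. A sketch limited to the fields checked in this hunk, with illustrative values:

const params = {
    internalHandlers: {},
    statsClient: undefined, // optional; must be an object when present
    allEndpoints: ['s3.example.com'],
    websiteEndpoints: ['s3-website.example.com'],
    blacklistedPrefixes: { bucket: ['mpu-'], object: ['mpu-'] }, // values illustrative
    dataRetrievalParams: {},
};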
View File
@@ -7,7 +7,7 @@ function routeDELETE(request, response, api, log, statsClient) {
if (request.query.uploadId) {
if (request.objectKey === undefined) {
return routesUtils.responseNoBody(
errors.InvalidRequest.customizeDescription('A key must be ' +
'specified'), null, response, 200, log);
}
api.callApiMethod('multipartDelete', request, response, log,
@@ -19,77 +19,77 @@ function routeDELETE(request, response, api, log, statsClient) {
} else if (request.objectKey === undefined) {
if (request.query.website !== undefined) {
return api.callApiMethod('bucketDeleteWebsite', request,
response, log, (err, corsHeaders) => {
routesUtils.statsReport500(err, statsClient);
return routesUtils.responseNoBody(err, corsHeaders,
response, 204, log);
});
} else if (request.query.cors !== undefined) {
return api.callApiMethod('bucketDeleteCors', request, response,
log, (err, corsHeaders) => {
routesUtils.statsReport500(err, statsClient);
return routesUtils.responseNoBody(err, corsHeaders,
response, 204, log);
});
} else if (request.query.replication !== undefined) {
return api.callApiMethod('bucketDeleteReplication', request,
response, log, (err, corsHeaders) => {
routesUtils.statsReport500(err, statsClient);
return routesUtils.responseNoBody(err, corsHeaders,
response, 204, log);
});
} else if (request.query.lifecycle !== undefined) {
return api.callApiMethod('bucketDeleteLifecycle', request,
response, log, (err, corsHeaders) => {
routesUtils.statsReport500(err, statsClient);
return routesUtils.responseNoBody(err, corsHeaders,
response, 204, log);
});
} else if (request.query.policy !== undefined) {
return api.callApiMethod('bucketDeletePolicy', request,
response, log, (err, corsHeaders) => {
routesUtils.statsReport500(err, statsClient);
return routesUtils.responseNoBody(err, corsHeaders,
response, 204, log);
});
} else if (request.query.encryption !== undefined) {
return api.callApiMethod('bucketDeleteEncryption', request,
response, log, (err, corsHeaders) => {
routesUtils.statsReport500(err, statsClient);
return routesUtils.responseNoBody(err, corsHeaders,
response, 204, log);
});
}
api.callApiMethod('bucketDelete', request, response, log,
(err, corsHeaders) => {
routesUtils.statsReport500(err, statsClient);
return routesUtils.responseNoBody(err, corsHeaders, response,
204, log);
});
} else {
if (request.query.tagging !== undefined) {
return api.callApiMethod('objectDeleteTagging', request,
response, log, (err, resHeaders) => {
routesUtils.statsReport500(err, statsClient);
return routesUtils.responseNoBody(err, resHeaders,
response, 204, log);
});
}
api.callApiMethod('objectDelete', request, response, log,
(err, corsHeaders) => {
/*
* Since AWS expects a 204 regardless of the existence of
the object, the errors NoSuchKey and NoSuchVersion should not
* be sent back as a response.
*/
if (err && !err.NoSuchKey && !err.NoSuchVersion) {
return routesUtils.responseNoBody(err, corsHeaders,
response, null, log);
}
routesUtils.statsReport500(err, statsClient);
return routesUtils.responseNoBody(null, corsHeaders, response,
204, log);
});
}
return undefined;
}
View File
@@ -2,7 +2,7 @@ const errors = require('../../errors');
const routesUtils = require('../routesUtils');
function routerGET(request, response, api, log, statsClient,
dataRetrievalParams) {
log.debug('routing request', { method: 'routerGET' });
if (request.bucketName === undefined && request.objectKey !== undefined) {
routesUtils.responseXMLBody(errors.NoSuchBucket, null, response, log);
@@ -17,18 +17,18 @@ dataRetrievalParams) {
// GET bucket ACL
if (request.query.acl !== undefined) {
api.callApiMethod('bucketGetACL', request, response, log,
(err, xml, corsHeaders) => {
routesUtils.statsReport500(err, statsClient);
return routesUtils.responseXMLBody(err, xml, response, log,
corsHeaders);
});
} else if (request.query.replication !== undefined) {
api.callApiMethod('bucketGetReplication', request, response, log,
(err, xml, corsHeaders) => {
routesUtils.statsReport500(err, statsClient);
return routesUtils.responseXMLBody(err, xml, response, log,
corsHeaders);
});
} else if (request.query.cors !== undefined) {
api.callApiMethod('bucketGetCors', request, response, log,
(err, xml, corsHeaders) => {
@@ -70,7 +70,7 @@ dataRetrievalParams) {
(err, xml, corsHeaders) => {
routesUtils.statsReport500(err, statsClient);
return routesUtils.responseXMLBody(err, xml, response, log,
corsHeaders);
});
} else if (request.query.policy !== undefined) {
api.callApiMethod('bucketGetPolicy', request, response, log,
@@ -95,11 +95,11 @@ dataRetrievalParams) {
});
} else if (request.query.encryption !== undefined) {
api.callApiMethod('bucketGetEncryption', request, response, log,
(err, xml, corsHeaders) => {
routesUtils.statsReport500(err, statsClient);
return routesUtils.responseXMLBody(err, xml, response,
log, corsHeaders);
});
} else {
// GET bucket
api.callApiMethod('bucketGet', request, response, log,
View File
@@ -21,11 +21,11 @@ function routeOPTIONS(request, response, api, log, statsClient) {
}
return api.callApiMethod('corsPreflight', request, response, log,
(err, resHeaders) => {
routesUtils.statsReport500(err, statsClient);
return routesUtils.responseNoBody(err, resHeaders, response, 200,
log);
});
}
module.exports = routeOPTIONS;
View File
@@ -27,28 +27,28 @@ function routePOST(request, response, api, log) {
    if (request.query.uploads !== undefined) {
        return api.callApiMethod('initiateMultipartUpload', request,
            response, log, (err, result, corsHeaders) =>
                routesUtils.responseXMLBody(err, result, response, log,
                    corsHeaders));
    }
    // POST complete multipart upload
    if (request.query.uploadId !== undefined) {
        return api.callApiMethod('completeMultipartUpload', request,
            response, log, (err, result, resHeaders) =>
                routesUtils.responseXMLBody(err, result, response, log,
                    resHeaders));
    }
    // POST multiObjectDelete
    if (request.query.delete !== undefined) {
        return api.callApiMethod('multiObjectDelete', request, response,
            log, (err, xml, corsHeaders) =>
                routesUtils.responseXMLBody(err, xml, response, log,
                    corsHeaders));
    }
    return routesUtils.responseNoBody(errors.NotImplemented, null, response,
        200, log);
}
/* eslint-enable no-param-reassign */
module.exports = routePOST;
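Reviewer note: the three POST branches map one-to-one onto the S3 multipart/batch lifecycle; a hedged sketch of the selection order, with a hypothetical classifier name:

    // ?uploads starts a multipart upload, ?uploadId=... completes one,
    // ?delete is a batched object deletion; anything else is NotImplemented.
    function classifyPost(query: Record<string, string | undefined>):
        'initiateMPU' | 'completeMPU' | 'multiObjectDelete' | 'notImplemented' {
        if (query.uploads !== undefined) return 'initiateMPU';
        if (query.uploadId !== undefined) return 'completeMPU';
        if (query.delete !== undefined) return 'multiObjectDelete';
        return 'notImplemented';
    }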

View File

@@ -14,16 +14,16 @@ function routePUT(request, response, api, log, statsClient) {
        || contentLength < 0)) || contentLength === '') {
        log.debug('invalid content-length header');
        return routesUtils.responseNoBody(
            errors.BadRequest, null, response, null, log);
    }
    // PUT bucket ACL
    if (request.query.acl !== undefined) {
        api.callApiMethod('bucketPutACL', request, response, log,
            (err, corsHeaders) => {
                routesUtils.statsReport500(err, statsClient);
                return routesUtils.responseNoBody(err, corsHeaders,
                    response, 200, log);
            });
    } else if (request.query.versioning !== undefined) {
        api.callApiMethod('bucketPutVersioning', request, response, log,
            (err, corsHeaders) => {
@@ -82,11 +82,11 @@ function routePUT(request, response, api, log, statsClient) {
            });
    } else if (request.query.encryption !== undefined) {
        api.callApiMethod('bucketPutEncryption', request, response, log,
            (err, corsHeaders) => {
                routesUtils.statsReport500(err, statsClient);
                return routesUtils.responseNoBody(err, corsHeaders,
                    response, 200, log);
            });
    } else {
        // PUT bucket
        return api.callApiMethod('bucketPut', request, response, log,
@@ -110,7 +110,7 @@ function routePUT(request, response, api, log, statsClient) {
                method: 'routePUT',
            });
            return routesUtils
                .responseNoBody(errors.InvalidDigest, null, response, 200, log);
        }
        if (request.headers['content-md5']) {
            request.contentMD5 = request.headers['content-md5'];
@@ -126,17 +126,17 @@ function routePUT(request, response, api, log, statsClient) {
                });
                return routesUtils
                    .responseNoBody(errors.InvalidDigest, null, response, 200,
                        log);
            }
        }
        if (request.query.partNumber) {
            if (request.headers['x-amz-copy-source']) {
                api.callApiMethod('objectPutCopyPart', request, response, log,
                    (err, xml, additionalHeaders) => {
                        routesUtils.statsReport500(err, statsClient);
                        return routesUtils.responseXMLBody(err, xml, response, log,
                            additionalHeaders);
                    });
            } else {
                api.callApiMethod('objectPutPart', request, response, log,
                    (err, calculatedHash, corsHeaders) => {
@@ -202,11 +202,11 @@ function routePUT(request, response, api, log, statsClient) {
                contentLength: request.parsedContentLength,
            });
            api.callApiMethod('objectPut', request, response, log,
                (err, resHeaders) => {
                    routesUtils.statsReport500(err, statsClient);
                    return routesUtils.responseNoBody(err, resHeaders,
                        response, 200, log);
                });
        }
    }
    return undefined;
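Reviewer note: both InvalidDigest branches above guard the Content-MD5 header, which S3 defines as the base64 encoding of the body's 128-bit MD5 digest. A small sketch of that contract (helper names are illustrative, not the code under review):

    import { createHash } from 'crypto';

    // What a client should send: base64 of the raw MD5 digest of the body.
    function md5Base64(body: Buffer): string {
        return createHash('md5').update(body).digest('base64');
    }

    // A well-formed value is always 24 characters: 22 base64 digits plus
    // '==' padding, since the digest is exactly 16 bytes.
    function looksLikeContentMD5(header: string): boolean {
        return /^[A-Za-z0-9+/]{22}==$/.test(header);
    }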

View File

@@ -14,7 +14,7 @@ function routerWebsite(request, response, api, log, statsClient,
    if (request.method === 'GET') {
        return api.callApiMethod('websiteGet', request, response, log,
            (err, userErrorPageFailure, dataGetInfo, resMetaHeaders,
                redirectInfo, key) => {
                routesUtils.statsReport500(err, statsClient);
                // request being redirected
                if (redirectInfo) {
@@ -43,21 +43,21 @@ function routerWebsite(request, response, api, log, statsClient,
    }
    if (request.method === 'HEAD') {
        return api.callApiMethod('websiteHead', request, response, log,
            (err, resMetaHeaders, redirectInfo, key) => {
                routesUtils.statsReport500(err, statsClient);
                if (redirectInfo) {
                    return routesUtils.redirectRequest(redirectInfo,
                        key, request.connection.encrypted,
                        response, request.headers.host, resMetaHeaders, log);
                }
                // could redirect on err so check for redirectInfo first
                if (err) {
                    return routesUtils.errorHeaderResponse(err, response,
                        resMetaHeaders, log);
                }
                return routesUtils.responseContentHeaders(err, {}, resMetaHeaders,
                    response, log);
            });
    }
    return undefined;
}
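Reviewer note: the comment "could redirect on err so check for redirectInfo first" is the load-bearing ordering in the HEAD branch. A tiny sketch of that precedence, under the assumption the GET branch follows the same rule:

    // A redirect outranks an error, and an error outranks the normal
    // header response; this mirrors the callback above.
    function websiteHeadOutcome(err: Error | null,
        redirectInfo?: object): 'redirect' | 'error' | 'headers' {
        if (redirectInfo) return 'redirect';
        if (err) return 'error';
        return 'headers';
    }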

View File

@@ -28,7 +28,7 @@ function setCommonResponseHeaders(headers, response, log) {
            } catch (e) {
                log.debug('header can not be added ' +
                    'to the response', { header: headers[key],
                    error: e.stack, method: 'setCommonResponseHeaders' });
            }
        }
    });
@@ -71,7 +71,7 @@ const XMLResponseBackend = {
     * @return {object} response - response object with additional headers
     */
    okResponse: function okXMLResponse(xml, response, log,
        additionalHeaders) {
        const bytesSent = Buffer.byteLength(xml);
        log.trace('sending success xml response');
        log.addDefaultFields({
@@ -118,7 +118,7 @@ const XMLResponseBackend = {
            `<Message>${errCode.description}</Message>`,
            '<Resource></Resource>',
            `<RequestId>${log.getSerializedUids()}</RequestId>`,
-           '</Error>'
+           '</Error>',
        );
        const xmlStr = xml.join('');
        const bytesSent = Buffer.byteLength(xmlStr);
@@ -148,7 +148,7 @@ const JSONResponseBackend = {
     * @return {object} response - response object with additional headers
     */
    okResponse: function okJSONResponse(json, response, log,
        additionalHeaders) {
        const bytesSent = Buffer.byteLength(json);
        log.trace('sending success json response');
        log.addDefaultFields({
@@ -166,7 +166,7 @@ const JSONResponseBackend = {
    },
    errorResponse: function errorJSONResponse(errCode, response, log,
        corsHeaders) {
        log.trace('sending error json response', { errCode });
        /*
        {
@@ -369,27 +369,27 @@ function retrieveData(locations, retrieveDataParams, response, log) {
            currentStream = readable;
            return readable.pipe(response, { end: false });
        }), err => {
            currentStream = null;
            if (err) {
                log.debug('abort response due to error', {
                    error: err.code, errMsg: err.message });
            }
            // call end for all cases (error/success) per node.js docs
            // recommendation
            response.end();
-       }
+       },
    );
}
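Reviewer note: retrieveData pipes each location into the response with { end: false } and ends the response exactly once, success or error, per the Node.js docs. A self-contained promise-based sketch of the same pattern (the async.eachSeries plumbing is assumed from context):

    import { Readable, Writable } from 'stream';

    async function pipeAll(streams: Readable[],
        response: Writable): Promise<void> {
        try {
            for (const readable of streams) {
                await new Promise<void>((resolve, reject) => {
                    readable.once('end', resolve);
                    readable.once('error', reject);
                    // keep the response open between locations
                    readable.pipe(response, { end: false });
                });
            }
        } finally {
            // call end for all cases (error/success)
            response.end();
        }
    }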
function _responseBody(responseBackend, errCode, payload, response, log,
    additionalHeaders) {
    if (errCode && !response.headersSent) {
        return responseBackend.errorResponse(errCode, response, log,
            additionalHeaders);
    }
    if (!response.headersSent) {
        return responseBackend.okResponse(payload, response, log,
            additionalHeaders);
    }
    return undefined;
}
@@ -398,8 +398,8 @@ function _computeContentLengthFromLocation(dataLocations) {
    return dataLocations.reduce(
        (sum, location) => (sum !== undefined &&
            (typeof location.size === 'number' || typeof location.size === 'string') ?
            sum + Number.parseInt(location.size, 10) :
            undefined), 0);
}
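Reviewer note: the reduce above has a deliberate poison value — once any location lacks a usable size, the accumulator becomes undefined and stays there, which later fails the content-length sanity check. A quick worked instance (the String() cast is only for TypeScript's benefit):

    const locations = [{ size: 10 }, { size: '20' }];
    // 0 + 10, then 10 + parseInt('20', 10) === 30
    const total = locations.reduce<number | undefined>(
        (sum, loc) => (sum !== undefined &&
            (typeof loc.size === 'number' || typeof loc.size === 'string')
            ? sum + Number.parseInt(String(loc.size), 10)
            : undefined), 0);
    // [{ size: 10 }, {}] would instead yield undefined for the whole sum.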
function _contentLengthMatchesLocations(contentLength, dataLocations) {
@@ -420,7 +420,7 @@ const routesUtils = {
     */
    responseXMLBody(errCode, xml, response, log, additionalHeaders) {
        return _responseBody(XMLResponseBackend, errCode, xml, response,
            log, additionalHeaders);
    },
    /**
@@ -434,7 +434,7 @@ const routesUtils = {
     */
    responseJSONBody(errCode, json, response, log, additionalHeaders) {
        return _responseBody(JSONResponseBackend, errCode, json, response,
            log, additionalHeaders);
    },
    /**
@@ -449,7 +449,7 @@ const routesUtils = {
    responseNoBody(errCode, resHeaders, response, httpCode = 200, log) {
        if (errCode && !response.headersSent) {
            return XMLResponseBackend.errorResponse(errCode, response, log,
                resHeaders);
        }
        if (!response.headersSent) {
            return okHeaderResponse(resHeaders, response, httpCode, log);
@@ -467,10 +467,10 @@ const routesUtils = {
     * @return {object} - router's response object
     */
    responseContentHeaders(errCode, overrideParams, resHeaders, response,
        log) {
        if (errCode && !response.headersSent) {
            return XMLResponseBackend.errorResponse(errCode, response, log,
                resHeaders);
        }
        if (!response.headersSent) {
            // Undefined added as an argument since need to send range to
@@ -505,7 +505,7 @@ const routesUtils = {
        retrieveDataParams, response, range, log) {
        if (errCode && !response.headersSent) {
            return XMLResponseBackend.errorResponse(errCode, response, log,
                resHeaders);
        }
        if (dataLocations !== null && !response.headersSent) {
            // sanity check of content length against individual data
@@ -513,13 +513,13 @@ const routesUtils = {
            const contentLength = resHeaders && resHeaders['Content-Length'];
            if (contentLength !== undefined &&
                !_contentLengthMatchesLocations(contentLength,
                    dataLocations)) {
                log.error('logic error: total length of fetched data ' +
                    'locations does not match returned content-length',
                    { contentLength, dataLocations });
                return XMLResponseBackend.errorResponse(errors.InternalError,
                    response, log,
                    resHeaders);
            }
        }
        if (!response.headersSent) {
@@ -592,7 +592,7 @@ const routesUtils = {
            `<h1>${err.code} ${response.statusMessage}</h1>`,
            '<ul>',
            `<li>Code: ${err.message}</li>`,
-           `<li>Message: ${err.description}</li>`
+           `<li>Message: ${err.description}</li>`,
        );
        if (!userErrorPageFailure && bucketName) {
@@ -602,7 +602,7 @@ const routesUtils = {
            `<li>RequestId: ${log.getSerializedUids()}</li>`,
            // AWS response contains HostId here.
            // TODO: consider adding
-           '</ul>'
+           '</ul>',
        );
        if (userErrorPageFailure) {
            html.push(
@@ -612,13 +612,13 @@ const routesUtils = {
                '<ul>',
                `<li>Code: ${err.message}</li>`,
                `<li>Message: ${err.description}</li>`,
-               '</ul>'
+               '</ul>',
            );
        }
        html.push(
            '<hr/>',
            '</body>',
-           '</html>'
+           '</html>',
        );
        return response.end(html.join(''), 'utf8', () => {
@@ -840,7 +840,7 @@ const routesUtils = {
                    // most specific potential hostname
                    bucketName =
                        potentialBucketName.length < bucketName.length ?
                            potentialBucketName : bucketName;
                }
            }
        }
@@ -848,7 +848,7 @@ const routesUtils = {
            return bucketName;
        }
        throw new Error(
-           `bad request: hostname ${host} is not in valid endpoints`
+           `bad request: hostname ${host} is not in valid endpoints`,
        );
    },

View File

@@ -1,6 +1,6 @@
'use strict'; // eslint-disable-line strict
-const randomBytes = require('crypto').randomBytes;
+import { randomBytes } from 'crypto';
/*
 * This set of function allows us to create an efficient shuffle
@@ -18,13 +18,13 @@ const randomBytes = require('crypto').randomBytes;
 * @return {number} the lowest number of bits
 * @throws Error if number < 0
 */
-function bitsNeeded(number) {
-    if (number < 0) {
+function bitsNeeded(num: number): number {
+    if (num < 0) {
        throw new Error('Input must be greater than or equal to zero');
-    } else if (number === 0) {
+    } else if (num === 0) {
        return 1;
    } else {
-        return Math.floor(Math.log2(number)) + 1;
+        return Math.floor(Math.log2(num)) + 1;
    }
}
@@ -36,7 +36,7 @@ function bitsNeeded(number) {
 * if numbits === 0
 * @throws Error if numBits < 0
 */
-function createMaskOnes(numBits) {
+function createMaskOnes(numBits: number): number {
    if (numBits < 0) {
        throw new Error('Input must be greater than or equal to zero');
    }
@@ -50,7 +50,7 @@ function createMaskOnes(numBits) {
 * @return {buffer} a InRangebuffer with 'howMany' pseudo-random bytes.
 * @throws Error if numBytes < 0 or if insufficient entropy
 */
-function nextBytes(numBytes) {
+function nextBytes(numBytes: number): Buffer {
    if (numBytes < 0) {
        throw new Error('Input must be greater than or equal to zero');
    }
@@ -67,7 +67,7 @@ function nextBytes(numBytes) {
 * @return {number} the number of bytes needed
 * @throws Error if numBits < 0
 */
-function bitsToBytes(numBits) {
+function bitsToBytes(numBits: number): number {
    if (numBits < 0) {
        throw new Error('Input must be greater than or equal to zero');
    }
@@ -83,7 +83,7 @@ function bitsToBytes(numBits) {
 * @return {number} - a pseudo-random integer in [min,max], undefined if
 * min >= max
 */
-function randomRange(min, max) {
+function randomRange(min: number, max: number): number {
    if (max < min) {
        throw new Error('Invalid range');
    }
@@ -98,7 +98,7 @@ function randomRange(min, max) {
    // we use a mask as an optimization: it increases the chances for the
    // candidate to be in range
    const mask = createMaskOnes(bits);
-    let candidate;
+    let candidate: number;
    do {
        candidate = parseInt(nextBytes(bytes).toString('hex'), 16) & mask;
    } while (candidate > range);
@@ -111,7 +111,7 @@ function randomRange(min, max) {
 * @param {Array} array - Any type of array
 * @return {Array} - The sorted array
 */
-module.exports = function shuffle(array) {
+export default function shuffle<T>(array: T[]): T[] {
    for (let i = array.length - 1; i > 0; i--) {
        const randIndex = randomRange(0, i);
        /* eslint-disable no-param-reassign */
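Reviewer note: the migrated randomRange is rejection sampling — draw just enough crypto bytes to cover the range, mask down to the needed bits so most draws land in range, and retry until one does, which keeps the distribution uniform (a plain modulo would bias it). A condensed, self-contained sketch of the same idea for ranges below 2^31:

    import { randomBytes } from 'crypto';

    function uniformInt(min: number, max: number): number {
        if (max < min) throw new Error('Invalid range');
        const range = max - min;
        const bits = range === 0 ? 1 : Math.floor(Math.log2(range)) + 1;
        const bytes = Math.ceil(bits / 8);
        const mask = Math.pow(2, bits) - 1; // createMaskOnes equivalent
        let candidate: number;
        do {
            candidate = parseInt(randomBytes(bytes).toString('hex'), 16) & mask;
        } while (candidate > range); // reject and redraw out-of-range values
        return candidate + min;
    }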

View File

@@ -19,7 +19,7 @@ const externalVersioningErrorMessage = 'We do not currently support putting ' +
class DataWrapper {
    constructor(client, implName, config, kms, metadata, locStorageCheckFn,
        vault) {
        this.client = client;
        this.implName = implName;
        this.config = config;
@@ -36,31 +36,31 @@ class DataWrapper {
                return cb(err);
            }
            return this._put(cipherBundle, value, valueSize, keyContext,
                backendInfo, log, (err, dataRetrievalInfo, hashedStream) => {
                    if (err) {
                        // if error putting object, counter should be decremented
                        return this.locStorageCheckFn(location, -valueSize, log,
                            error => {
                                if (error) {
                                    log.error('Error decrementing location metric ' +
                                        'following object PUT failure',
                                        { error: error.message });
                                }
                                return cb(err);
                            });
                    }
                    if (hashedStream) {
                        if (hashedStream.completedHash) {
                            return cb(null, dataRetrievalInfo, hashedStream);
                        }
                        hashedStream.on('hashed', () => {
                            hashedStream.removeAllListeners('hashed');
                            return cb(null, dataRetrievalInfo, hashedStream);
                        });
                        return undefined;
                    }
                    return cb(null, dataRetrievalInfo);
                });
        });
    }
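Reviewer note: the put callback above leans on a small contract with hashedStream: the MD5 may already be finished (completedHash set) or may arrive later via a one-shot 'hashed' event. A hedged sketch of that contract with an EventEmitter (field and event names taken from the code above, the rest assumed):

    import { EventEmitter } from 'events';

    type HashedStream = EventEmitter & { completedHash?: string };

    function whenHashed(hashedStream: HashedStream,
        cb: (hash: string) => void): void {
        if (hashedStream.completedHash) {
            return cb(hashedStream.completedHash);
        }
        // 'once' stands in for the on + removeAllListeners pair above
        hashedStream.once('hashed', () =>
            cb(hashedStream.completedHash as string));
    }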
@@ -97,33 +97,33 @@ class DataWrapper {
            clientGetInfo.response = response;
        }
        this.client.get(clientGetInfo, range, log.getSerializedUids(),
            (err, stream) => {
                if (err) {
                    log.error('get error from datastore',
                        { error: err, implName: this.implName });
                    return cb(errors.ServiceUnavailable);
                }
                if (objectGetInfo.cipheredDataKey) {
                    const serverSideEncryption = {
                        cryptoScheme: objectGetInfo.cryptoScheme,
                        masterKeyId: objectGetInfo.masterKeyId,
                        cipheredDataKey: Buffer.from(
                            objectGetInfo.cipheredDataKey, 'base64'),
                    };
                    const offset = objectGetInfo.range ? objectGetInfo.range[0] : 0;
                    return this.kms.createDecipherBundle(serverSideEncryption,
                        offset, log, (err, decipherBundle) => {
                            if (err) {
                                log.error('cannot get decipher bundle from kms',
                                    { method: 'data.wrapper.data.get' });
                                return cb(err);
                            }
                            stream.pipe(decipherBundle.decipher);
                            return cb(null, decipherBundle.decipher);
                        });
                }
                return cb(null, stream);
            });
    }
    delete(objectGetInfo, log, cb) {
@@ -153,15 +153,15 @@ class DataWrapper {
            if (!err) {
                // pass size as negative so location metric is decremented
                return this.locStorageCheckFn(objectGetInfo.dataStoreName,
                    -objectGetInfo.size, log, err => {
                        if (err) {
                            log.error('Utapi error pushing location metric', {
                                error: err,
                                key: objectGetInfo.key,
                                method: 'locationStorageCheck' });
                        }
                        return callback(err);
                    });
            }
            return callback(err);
        });
@@ -173,7 +173,7 @@ class DataWrapper {
        // meantime, we at least log the location of the data we are
        // about to delete before attempting its deletion.
        if (this._shouldSkipDelete(locations, requestMethod,
            newObjDataStoreName)) {
            return process.nextTick(cb);
        }
        log.trace('initiating batch delete', {
@@ -210,7 +210,7 @@ class DataWrapper {
            err => {
                if (err) {
                    log.end().error('batch delete failed', { error: err });
                    // deletion of non-existing objects result in 204
                    if (err.code === 404) {
                        return cb();
                    }
@@ -233,27 +233,27 @@ class DataWrapper {
            return cb(null, defResp);
        }
        return this.client.healthcheck(flightCheckOnStartUp, log,
            (err, result) => {
                let respBody = {};
                if (err) {
                    log.error(`error from ${this.implName}`, { error: err });
                    respBody[this.implName] = {
                        error: err,
                    };
                    // error returned as null so async parallel doesn't return
                    // before all backends are checked
                    return cb(null, respBody);
                }
                if (this.implName === 'multipleBackends') {
                    respBody = result;
                    return cb(null, respBody);
                }
                respBody[this.implName] = {
                    code: result.statusCode,
                    message: result.statusMessage,
                };
                return cb(null, respBody);
            });
    }
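Reviewer note: healthcheck deliberately reports backend failures in the response body while passing null as the callback error, so a surrounding async.parallel never short-circuits before every backend has answered. A sketch of that shape:

    type HealthBody = Record<string, object>;

    function healthEntry(implName: string, err: Error | null,
        result?: { statusCode: number, statusMessage: string }): HealthBody {
        if (err) {
            // reported, not thrown: the caller still sees error === null
            return { [implName]: { error: err } };
        }
        return { [implName]: {
            code: result!.statusCode,
            message: result!.statusMessage,
        } };
    }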
    getDiskUsage(log, cb) {
@@ -291,36 +291,36 @@ class DataWrapper {
     * @returns {function} cb - callback
     */
    copyObject(request, sourceLocationConstraintName, storeMetadataParams,
        dataLocator, dataStoreContext, destBackendInfo, sourceBucketMD,
        destBucketMD, serverSideEncryption, log, cb) {
        if (this.config.backends.data === 'multiple' &&
            backendUtils.externalBackendCopy(this.config,
                sourceLocationConstraintName, storeMetadataParams.dataStoreName,
                sourceBucketMD, destBucketMD)
            && serverSideEncryption === null) {
            const destLocationConstraintName =
                storeMetadataParams.dataStoreName;
            const objectGetInfo = dataLocator[0];
            const externalSourceKey = objectGetInfo.key;
            return this.client.copyObject(request, destLocationConstraintName,
                externalSourceKey, sourceLocationConstraintName,
                storeMetadataParams, this.config, log,
                (error, objectRetrievalInfo) => {
                    if (error) {
                        return cb(error);
                    }
                    const putResult = {
                        key: objectRetrievalInfo.key,
                        dataStoreName: objectRetrievalInfo.dataStoreName,
                        dataStoreType: objectRetrievalInfo.dataStoreType,
                        dataStoreVersionId: objectRetrievalInfo.dataStoreVersionId,
                        size: storeMetadataParams.size,
                        dataStoreETag: objectGetInfo.dataStoreETag,
                        start: objectGetInfo.start,
                    };
                    const putResultArr = [putResult];
                    return cb(null, putResultArr);
                });
        }
        const that = this;
@@ -330,37 +330,37 @@ class DataWrapper {
        // copied at once.
        return async.mapLimit(dataLocator, 1,
            // eslint-disable-next-line prefer-arrow-callback
            function copyPart(part, copyCb) {
                if (part.dataStoreType === 'azure') {
                    const passThrough = new PassThrough();
                    return async.parallel([
                        parallelCb => that.get(part, passThrough, log, err =>
                            parallelCb(err)),
                        parallelCb => that._dataCopyPut(serverSideEncryption,
                            passThrough, part, dataStoreContext,
                            destBackendInfo, log, parallelCb),
                    ], (err, res) => {
                        if (err) {
                            return copyCb(err);
                        }
                        return copyCb(null, res[1]);
                    });
                }
                return that.get(part, null, log, (err, stream) => {
                    if (err) {
                        return copyCb(err);
                    }
                    return that._dataCopyPut(serverSideEncryption, stream,
                        part, dataStoreContext, destBackendInfo, log, copyCb);
                });
            }, (err, results) => {
                if (err) {
                    log.debug('error transferring data from source',
                        { error: err });
                    return cb(err);
                }
                return cb(null, results);
            });
    }
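Reviewer note: async.mapLimit(dataLocator, 1, ...) is a strictly sequential map — at most one part's data is in flight at a time, with results kept in order. An equivalent dependency-free sketch:

    async function mapSeries<T, R>(items: T[],
        fn: (item: T) => Promise<R>): Promise<R[]> {
        const results: R[] = [];
        for (const item of items) {
            results.push(await fn(item)); // limit of 1: one part at a time
        }
        return results;
    }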
    /**
@@ -386,8 +386,8 @@ class DataWrapper {
     * @returns {function} cb - callback
     */
    uploadPartCopy(request, log, destBucketMD, sourceLocationConstraintName,
        destLocationConstraintName, dataLocator, dataStoreContext, lcCheckFn,
        callback) {
        const serverSideEncryption = destBucketMD.getServerSideEncryption();
        const lastModified = new Date().toJSON();
@@ -425,15 +425,15 @@ class DataWrapper {
        if (locationTypeMatch && dataLocator.length === 1) {
            const sourceKey = dataLocator[0].key;
            return this.client.uploadPartCopy(request,
                destLocationConstraintName, sourceKey, sourceLocationConstraintName,
                this.config, log, (error, eTag) => {
                    if (error) {
                        return callback(error);
                    }
                    const doSkip = srcType === 'aws' ? skipError : null;
                    return callback(doSkip, eTag, lastModified,
                        serverSideEncryption);
                });
        }
        const backendInfo = new BackendInfo(this._config,
@@ -448,72 +448,72 @@ class DataWrapper {
        // in order so can get the ETag of full object
        return async.forEachOfSeries(dataLocator,
            // eslint-disable-next-line prefer-arrow-callback
            function copyPart(part, index, cb) {
                if (part.dataStoreType === 'azure') {
                    const passThrough = new PassThrough();
                    return async.parallel([
                        next => that.get(part, passThrough, log, err => {
                            if (err) {
                                log.error('error getting data part from Azure',
                                    {
                                        error: err,
                                        method: 'objectPutCopyPart::' +
                                            'multipleBackendGateway.copyPart',
                                    });
                                return next(err);
                            }
                            return next();
                        }),
                        next => that._dataCopyPutPart(request,
                            serverSideEncryption, passThrough, part,
                            dataStoreContext, backendInfo, locations, log, next),
                    ], err => {
                        if (err) {
                            return cb(err);
                        }
                        return cb();
                    });
                }
                return that.get(part, null, log, (err, stream) => {
                    if (err) {
                        log.debug('error getting object part', { error: err });
                        return cb(err);
                    }
                    const hashedStream =
                        new RelayMD5Sum(totalHash, updatedHash => {
                            totalHash = updatedHash;
                        });
                    stream.pipe(hashedStream);
                    // destLocationConstraintName is location of the
                    // destination MPU object
                    return that._dataCopyPutPart(request, serverSideEncryption,
                        hashedStream, part, dataStoreContext, backendInfo,
                        locations, log, cb);
                });
            }, err => {
                // Digest the final combination of all of the part streams
                if (err && err !== skipError) {
                    log.debug('error transferring data from source',
                        { error: err, method: 'goGetData' });
                    return callback(err);
                }
                if (totalHash) {
                    totalHash = totalHash.digest('hex');
                } else {
                    totalHash = locations[0].dataStoreETag;
                }
                if (err && err === skipError) {
                    return callback(skipError, totalHash, lastModified,
                        serverSideEncryption);
                }
                return callback(null, totalHash, lastModified,
                    serverSideEncryption, locations);
            });
    }
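Reviewer note: the RelayMD5Sum usage above threads one rolling hash through every part stream, so the final digest covers the concatenation of all parts in upload order (and falls back to the single part's dataStoreETag when nothing was hashed). A hedged sketch of the rolling-digest idea, assuming RelayMD5Sum's semantics from context:

    import { createHash, Hash } from 'crypto';

    function foldPart(totalHash: Hash | null, partBytes: Buffer): Hash {
        const h = totalHash ?? createHash('md5'); // first part starts the hash
        h.update(partBytes);
        return h;
    }
    // After the last part, foldPart(...).digest('hex') plays the role of
    // totalHash.digest('hex') in the completion callback above.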
    abortMPU(objectKey, uploadId, location, bucketName, request, destBucket,
        lcCheckFn, log, callback) {
        if (this.config.backends.data === 'multiple') {
            // if controlling location constraint is not stored in object
            // metadata, mpu was initiated in legacy S3C, so need to
@@ -531,13 +531,13 @@ class DataWrapper {
            }
            return this.client.abortMPU(objectKey, uploadId, location,
                bucketName, log, callback);
        }
        return callback(null, false);
    }
    completeMPU(request, mpuInfo, mdInfo, location, userMetadata,
        contentSettings, tagging, lcCheckFn, log, callback) {
        const { objectKey, uploadId, jsonList, bucketName, destBucket } =
            mpuInfo;
        if (this.config.backends.data === 'multiple') {
@@ -576,30 +576,30 @@ class DataWrapper {
        } = mpuInfo;
        if (this.config.backends.data === 'multiple') {
            return this.client.createMPU(objectKey, metaHeaders,
                bucketName, websiteRedirectHeader, locConstraint, contentType,
                cacheControl, contentDisposition, contentEncoding, tagging, log,
                (err, dataBackendResObj) => {
                    if (err) {
                        return callback(err);
                    }
                    const configLoc =
                        this.config.locationConstraints[locConstraint];
                    if (locConstraint && configLoc && configLoc.type &&
                        constants.versioningNotImplBackends[configLoc.type]) {
                        const vcfg = destinationBucket.getVersioningConfiguration();
                        const isVersionedObj = vcfg && vcfg.Status === 'Enabled';
                        if (isVersionedObj) {
                            log.debug(externalVersioningErrorMessage,
                                { method: 'initiateMultipartUpload',
                                    error: errors.NotImplemented });
                            return callback(errors.NotImplemented
                                .customizeDescription(
                                    externalVersioningErrorMessage),
                                null, isVersionedObj);
                        }
                    }
                    return callback(null, dataBackendResObj);
                });
        }
        return callback();
    }
@@ -632,19 +632,19 @@ class DataWrapper {
            location = mpuOverviewObj.controllingLocationConstraint;
        }
        return this.client.listParts(objectKey, uploadId,
            location, bucketName, partNumberMarker, maxParts, log,
            (err, backendPartList) => {
                if (err) {
                    return callback(err);
                }
                return callback(null, backendPartList);
            });
        }
        return callback();
    }
    putPart(request, mpuInfo, streamingV4Params, objectLocationConstraint,
        lcCheckFn, log, callback) {
        const {
            stream,
            destinationBucket,
@@ -668,19 +668,19 @@ class DataWrapper {
            objectLocationConstraint = backendInfoObj.controllingLC;
        }
        return this.client.uploadPart(request,
            streamingV4Params, stream, size, objectLocationConstraint,
            objectKey, uploadId, partNumber, bucketName, log,
            (err, partInfo) => {
                if (err) {
                    log.error('error putting part to data backend', {
                        error: err,
                        method:
                            'objectPutPart::multipleBackendGateway.uploadPart',
                    });
                    return callback(err);
                }
                return callback(null, partInfo, objectLocationConstraint);
            });
        }
        return callback();
    }
@@ -688,7 +688,7 @@ class DataWrapper {
    objectTagging(method, objectKey, bucket, objectMD, log, callback) {
        if (this.config.backends.data === 'multiple') {
            return this.client.objectTagging(method, objectKey,
                bucket, objectMD, log, err => callback(err));
        }
        return callback();
    }
@@ -716,7 +716,7 @@ class DataWrapper {
        return callback();
    }
    /**
     * _putForCopy - put used for copying object
     * @param {object} cipherBundle - cipher bundle that encrypt the data
     * @param {object} stream - stream containing the data
@@ -735,34 +735,34 @@ class DataWrapper {
     * @returns {function} cb - callback
     */
    _putForCopy(cipherBundle, stream, part, dataStoreContext, destBackendInfo,
        log, cb) {
        return this.put(cipherBundle, stream, part.size, dataStoreContext,
            destBackendInfo, log, (error, partRetrievalInfo) => {
                if (error) {
                    return cb(error);
                }
                const partResult = {
                    key: partRetrievalInfo.key,
                    dataStoreName: partRetrievalInfo
                        .dataStoreName,
                    dataStoreType: partRetrievalInfo
                        .dataStoreType,
                    start: part.start,
                    size: part.size,
                };
                if (cipherBundle) {
                    partResult.cryptoScheme = cipherBundle.cryptoScheme;
                    partResult.cipheredDataKey = cipherBundle.cipheredDataKey;
                }
                if (part.dataStoreETag) {
                    partResult.dataStoreETag = part.dataStoreETag;
                }
                if (partRetrievalInfo.dataStoreVersionId) {
                    partResult.dataStoreVersionId =
                        partRetrievalInfo.dataStoreVersionId;
                }
                return cb(null, partResult);
            });
    }
    /**
@@ -783,17 +783,17 @@ class DataWrapper {
     * @returns {function} cb - callback
     */
    _dataCopyPut(serverSideEncryption, stream, part, dataStoreContext,
        destBackendInfo, log, cb) {
        if (serverSideEncryption) {
            return this.kms.createCipherBundle(serverSideEncryption, log,
                (err, cipherBundle) => {
                    if (err) {
                        log.debug('error getting cipherBundle');
                        return cb(errors.InternalError);
                    }
                    return this._putForCopy(cipherBundle, stream, part,
                        dataStoreContext, destBackendInfo, log, cb);
                });
        }
        // Copied object is not encrypted so just put it
        // without a cipherBundle
@@ -802,7 +802,7 @@ class DataWrapper {
    }
    _dataCopyPutPart(request, serverSideEncryption, stream, part,
        dataStoreContext, destBackendInfo, locations, log, cb) {
        const numberPartSize = Number.parseInt(part.size, 10);
        const partNumber = Number.parseInt(request.query.partNumber, 10);
        const uploadId = request.query.uploadId;
@@ -811,100 +811,100 @@ class DataWrapper {
        const destLocationConstraintName = destBackendInfo
            .getControllingLocationConstraint();
        if (externalBackends[this.config.locationConstraints
            [destLocationConstraintName].type]) {
            return this.client.uploadPart(null, null, stream,
                numberPartSize, destLocationConstraintName, destObjectKey, uploadId,
                partNumber, destBucketName, log, (err, partInfo) => {
                    if (err) {
                        log.error('error putting part to AWS', {
                            error: err,
                            method: 'objectPutCopyPart::' +
                                'multipleBackendGateway.uploadPart',
                        });
                        return cb(errors.ServiceUnavailable);
                    }
                    // skip to end of waterfall because don't need to store
                    // part metadata
                    if (partInfo && partInfo.dataStoreType === 'aws_s3') {
                        // if data backend handles MPU, skip to end of waterfall
                        const partResult = {
                            dataStoreETag: partInfo.dataStoreETag,
                        };
                        locations.push(partResult);
                        return cb(skipError, partInfo.dataStoreETag);
                    } else if (partInfo && partInfo.dataStoreType === 'azure') {
                        const partResult = {
                            key: partInfo.key,
                            dataStoreName: partInfo.dataStoreName,
                            dataStoreETag: partInfo.dataStoreETag,
                            size: numberPartSize,
                            numberSubParts: partInfo.numberSubParts,
                            partNumber: partInfo.partNumber,
                        };
                        locations.push(partResult);
                        return cb();
                    } else if (partInfo && partInfo.dataStoreType === 'gcp') {
                        const partResult = {
                            key: partInfo.key,
                            dataStoreName: partInfo.dataStoreName,
                            dataStoreETag: partInfo.dataStoreETag,
                            size: numberPartSize,
                            partNumber: partInfo.partNumber,
                        };
                        locations.push(partResult);
                        return cb();
                    }
                    return cb(skipError);
                });
        }
        if (serverSideEncryption) {
            return this.kms.createCipherBundle(serverSideEncryption, log,
                (err, cipherBundle) => {
                    if (err) {
                        log.debug('error getting cipherBundle', { error: err });
                        return cb(errors.InternalError);
                    }
                    return this.put(cipherBundle, stream, numberPartSize,
                        dataStoreContext, destBackendInfo, log,
                        (error, partRetrievalInfo, hashedStream) => {
                            if (error) {
                                log.debug('error putting encrypted part', { error });
                                return cb(error);
                            }
                            const partResult = {
                                key: partRetrievalInfo.key,
                                dataStoreName: partRetrievalInfo.dataStoreName,
                                dataStoreETag: hashedStream.completedHash,
                                // Do not include part start here since will change in
                                // final MPU object
                                size: numberPartSize,
                                sseCryptoScheme: cipherBundle.cryptoScheme,
                                sseCipheredDataKey: cipherBundle.cipheredDataKey,
                                sseAlgorithm: cipherBundle.algorithm,
                                sseMasterKeyId: cipherBundle.masterKeyId,
                            };
                            locations.push(partResult);
                            return cb();
                        });
                });
        }
        // Copied object is not encrypted so just put it
        // without a cipherBundle
        return this.put(null, stream, numberPartSize, dataStoreContext,
            destBackendInfo, log, (error, partRetrievalInfo, hashedStream) => {
                if (error) {
                    log.debug('error putting object part', { error });
                    return cb(error);
                }
                const partResult = {
                    key: partRetrievalInfo.key,
                    dataStoreName: partRetrievalInfo.dataStoreName,
                    dataStoreETag: hashedStream.completedHash,
                    size: numberPartSize,
                };
                locations.push(partResult);
                return cb();
            });
    }
    _put(cipherBundle, value, valueSize, keyContext, backendInfo, log, cb) {
@@ -930,14 +930,14 @@ class DataWrapper {
            /* eslint-disable no-param-reassign */
            keyContext.cipherBundle = cipherBundle;
            return this.client.put(hashedStream, valueSize, keyContext,
                backendInfo, log.getSerializedUids(), (err, dataRetrievalInfo) => {
                    if (err) {
                        log.error('put error from datastore',
                            { error: err, implName: this.implName });
                        return cb(errors.ServiceUnavailable);
                    }
                    return cb(null, dataRetrievalInfo, hashedStream);
                });
        }
        /* eslint-enable no-param-reassign */
        let writeStream = hashedStream;
@@ -947,18 +947,18 @@ class DataWrapper {
        }
        return this.client.put(writeStream, valueSize, keyContext,
            log.getSerializedUids(), (err, key) => {
                if (err) {
                    log.error('put error from datastore',
                        { error: err, implName: this.implName });
                    return cb(errors.InternalError);
                }
                const dataRetrievalInfo = {
                    key,
                    dataStoreName: this.implName,
                };
                return cb(null, dataRetrievalInfo, hashedStream);
            });
    }
    /**
@@ -976,25 +976,25 @@ class DataWrapper {
            return cb(errors.InternalError);
        }
        return this.client.delete(objectGetInfo, log.getSerializedUids(),
            err => {
                if (err) {
                    if (err.ObjNotFound) {
                        log.info('no such key in datastore', {
                            objectGetInfo,
                            implName: this.implName,
                            moreRetries: 'no',
                        });
                        return cb(err);
                    }
                    log.error('delete error from datastore', {
                        error: err,
                        implName: this.implName,
                        moreRetries: 'yes',
                    });
                    return this._retryDelete(objectGetInfo, log, count + 1, cb);
                }
                return cb();
            });
    }
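Reviewer note: _retryDelete's policy reads directly off the branches above: a missing key (ObjNotFound) ends retries immediately, anything else retries with count + 1. A compact sketch with an assumed maxRetries bound (the real cutoff check lives outside the lines shown):

    type DeleteErr = { ObjNotFound?: boolean } | null;

    function retryDelete(attempt: (cb: (err: DeleteErr) => void) => void,
        count: number, maxRetries: number,
        cb: (err?: DeleteErr) => void): void {
        attempt(err => {
            if (!err) return cb();
            if (err.ObjNotFound || count >= maxRetries) return cb(err);
            retryDelete(attempt, count + 1, maxRetries, cb); // moreRetries: 'yes'
        });
    }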
    // This check is done because on a put, complete mpu or copy request to

View File

@@ -30,7 +30,7 @@ function parseLC(config, vault) {
        if (locationObj.details.connector.sproxyd) {
            clients[location] = new Sproxy({
                bootstrap: locationObj.details.connector
                    .sproxyd.bootstrap,
                // Might be undefined which is ok since there is a default
                // set in sproxydclient if chordCos is undefined
                chordCos: locationObj.details.connector.sproxyd.chordCos,
@@ -60,7 +60,7 @@ function parseLC(config, vault) {
            // keepalive config
            const httpAgentConfig =
                config.externalBackends[locationObj.type].httpAgent;
            // max sockets is infinity by default and expressed as null
            if (httpAgentConfig.maxSockets === null) {
                httpAgentConfig.maxSockets = undefined;
            }
@@ -131,7 +131,7 @@ function parseLC(config, vault) {
            const azureStorageEndpoint = config.getAzureEndpoint(location);
            const proxyParams =
                backendUtils.proxyCompareUrl(azureStorageEndpoint) ?
                    {} : config.outboundProxy;
            const azureStorageCredentials =
                config.getAzureStorageCredentials(location);
            clients[location] = new AzureClient({
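Reviewer note: the maxSockets normalization above exists because the config spells "unlimited" as null, while Node's http.Agent expresses its default (Infinity) as an omitted/undefined option. A sketch of the translation:

    import { Agent } from 'http';

    function makeAgent(cfg: { maxSockets: number | null,
        keepAlive?: boolean }): Agent {
        return new Agent({
            keepAlive: cfg.keepAlive,
            // null in config means "use Node's default", i.e. Infinity
            maxSockets: cfg.maxSockets === null ? undefined : cfg.maxSockets,
        });
    }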
View File
@@ -52,26 +52,26 @@ class MultipleBackendGateway {
            }
        }
        return client.put(writeStream, size, keyContext, reqUids,
            (err, key, dataStoreVersionId, dataStoreSize, dataStoreMD5) => {
                const log = createLogger(reqUids);
                log.debug('put to location', { controllingLocationConstraint });
                if (err) {
                    log.error('error from datastore',
                        { error: err, dataStoreType: client.clientType });
                    return callback(errors.ServiceUnavailable);
                }
                const dataRetrievalInfo = {
                    key,
                    dataStoreName: controllingLocationConstraint,
                    dataStoreType: client.clientType,
                    dataStoreVersionId,
                    dataStoreSize,
                    dataStoreMD5,
                };
                return callback(null, dataRetrievalInfo);
            // sproxyd accepts keyschema, send as null so sproxyd generates key
            // send metadata as param for AzureClient in Arsenal
            }, null, this.metadata);
    }
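The gateway holds one client per location constraint and forwards each put to the client owning the controlling location, normalizing backend failures to a single error for the caller. A stripped-down sketch of that dispatch (the clients map and error wording are illustrative):

    // Sketch: pick the client for the controlling location and forward.
    function putToLocation(clients, location, stream, size, cb) {
        const client = clients[location];
        if (!client) {
            return cb(new Error(`no client for location ${location}`));
        }
        return client.put(stream, size, (err, key) => {
            if (err) {
                // hide backend-specific errors behind one generic failure
                return cb(new Error('ServiceUnavailable'));
            }
            return cb(null, { key, dataStoreName: location });
        });
    }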
    head(objectGetInfoArr, reqUids, callback) {

@@ -166,14 +166,14 @@ class MultipleBackendGateway {
        }, () => {
            async.parallel([
                next => checkExternalBackend(
                    this.clients, awsArray, 'aws_s3', flightCheckOnStartUp,
                    externalBackendHealthCheckInterval, next),
                next => checkExternalBackend(
                    this.clients, azureArray, 'azure', flightCheckOnStartUp,
                    externalBackendHealthCheckInterval, next),
                next => checkExternalBackend(
                    this.clients, gcpArray, 'gcp', flightCheckOnStartUp,
                    externalBackendHealthCheckInterval, next),
            ], (errNull, externalResp) => {
                const externalLocResults = [];
                externalResp.forEach(resp => externalLocResults.push(...resp));
@@ -185,19 +185,19 @@ class MultipleBackendGateway {
    }

    createMPU(key, metaHeaders, bucketName, websiteRedirectHeader,
        location, contentType, cacheControl, contentDisposition,
        contentEncoding, tagging, log, cb) {
        const client = this.clients[location];
        if (client.clientType === 'aws_s3' || client.clientType === 'gcp') {
            return client.createMPU(key, metaHeaders, bucketName,
                websiteRedirectHeader, contentType, cacheControl,
                contentDisposition, contentEncoding, tagging, log, cb);
        }
        return cb();
    }

    uploadPart(request, streamingV4Params, stream, size, location, key,
        uploadId, partNumber, bucketName, log, cb) {
        const client = this.clients[location];
        if (client.uploadPart) {
@@ -206,29 +206,29 @@ class MultipleBackendGateway {
                    return cb(err);
                }
                return client.uploadPart(request, streamingV4Params, stream,
                    size, key, uploadId, partNumber, bucketName, log,
                    (err, partInfo) => {
                        if (err) {
                            // if error putting part, counter should be decremented
                            return this.locStorageCheckFn(location, -size, log,
                                error => {
                                    if (error) {
                                        log.error('Error decrementing location ' +
                                            'metric following object PUT failure',
                                            { error: error.message });
                                    }
                                    return cb(err);
                                });
                        }
                        return cb(null, partInfo);
                    });
            });
        }
        return cb();
    }
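uploadPart first reserves size bytes against the location's storage metric, then hands the reservation back if the backend upload fails; note that the caller always sees the original upload error, never the decrement error. A compact sketch of that compensation pattern (the helper names are stand-ins for Arsenal's locStorageCheckFn and client call):

    // Sketch: reserve quota, do the work, release the reservation on failure.
    function uploadWithQuota(location, size, reserve, upload, cb) {
        reserve(location, size, err => {
            if (err) {
                return cb(err);
            }
            return upload((err, partInfo) => {
                if (err) {
                    // compensate: give the reserved bytes back, but still
                    // surface the original upload error to the caller
                    return reserve(location, -size, () => cb(err));
                }
                return cb(null, partInfo);
            });
        });
    }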
    listParts(key, uploadId, location, bucketName, partNumberMarker, maxParts,
        log, cb) {
        const client = this.clients[location];
        if (client.listParts) {

@@ -239,7 +239,7 @@ class MultipleBackendGateway {
    }

    completeMPU(key, uploadId, location, jsonList, mdInfo, bucketName,
        userMetadata, contentSettings, tagging, log, cb) {
        const client = this.clients[location];
        if (client.completeMPU) {
            const args = [jsonList, mdInfo, key, uploadId, bucketName];
@@ -291,40 +291,40 @@ class MultipleBackendGateway {
    // NOTE: using copyObject only if copying object from one external
    // backend to the same external backend
    copyObject(request, destLocationConstraintName, externalSourceKey,
        sourceLocationConstraintName, storeMetadataParams, config, log, cb) {
        const client = this.clients[destLocationConstraintName];
        if (client.copyObject) {
            return this.locStorageCheckFn(destLocationConstraintName,
                storeMetadataParams.size, log, err => {
                    if (err) {
                        cb(err);
                    }
                    return client.copyObject(request, destLocationConstraintName,
                        externalSourceKey, sourceLocationConstraintName,
                        storeMetadataParams, config, log,
                        (err, key, dataStoreVersionId) => {
                            const dataRetrievalInfo = {
                                key,
                                dataStoreName: destLocationConstraintName,
                                dataStoreType: client.clientType,
                                dataStoreVersionId,
                            };
                            if (err) {
                                // if error copying obj, counter should be decremented
                                return this.locStorageCheckFn(
                                    destLocationConstraintName, -storeMetadataParams.size,
                                    log, error => {
                                        if (error) {
                                            log.error('Error decrementing location ' +
                                                'metric following object PUT failure',
                                                { error: error.message });
                                        }
                                        return cb(err);
                                    });
                            }
                            return cb(null, dataRetrievalInfo);
                        });
                });
        }
        return cb(errors.NotImplemented
            .customizeDescription('Can not copy object from ' +
@@ -332,15 +332,15 @@ class MultipleBackendGateway {
    }

    uploadPartCopy(request, location, awsSourceKey,
        sourceLocationConstraintName, config, log, cb) {
        const client = this.clients[location];
        if (client.uploadPartCopy) {
            return client.uploadPartCopy(request, awsSourceKey,
                sourceLocationConstraintName, config,
                log, cb);
        }
        return cb(errors.NotImplemented.customizeDescription(
            'Can not copy object from ' +
            `${client.clientType} to ${client.clientType}`));
    }
@@ -348,7 +348,7 @@ class MultipleBackendGateway {
        const client = this.clients[location];
        if (client.protectAzureBlocks) {
            return client.protectAzureBlocks(this.metadata, bucketName,
                objectKey, location, log, cb);
        }
        return cb();
    }
View File
@@ -11,7 +11,7 @@ const { createLogger, logHelper, removeQuotes, trimXMetaPrefix } =
const missingVerIdInternalError = errors.InternalError.customizeDescription(
    'Invalid state. Please ensure versioning is enabled ' +
    'in AWS for the location constraint and try again.',
);

class AwsClient {
@@ -36,35 +36,35 @@ class AwsClient {
        // this request implicitly updates the endpoint for the location
        // the following code explicitly sets it to avoid surprises
        this._client.getBucketLocation({ Bucket: this._awsBucketName },
            (err, res) => {
                if (err && err.code !== 'AuthorizationHeaderMalformed') {
                    this._logger.error('error during setup', {
                        error: err,
                        method: 'AwsClient.setup',
                    });
                    return cb(err);
                }
                let region;
                if (err && err.code === 'AuthorizationHeaderMalformed') {
                    // set regional endpoint
                    region = err.region;
                } else if (res) {
                    region = res.LocationConstraint;
                }
                this._client.config.update({ region });
                const isAWS = this._s3Params.endpoint.endsWith('amazonaws.com');
                if (region && isAWS) {
                    const endpoint = `s3.${region}.amazonaws.com`;
                    this._logger.debug('setting regional endpoint', {
                        method: 'AwsClient.setup',
                        region,
                        endpoint,
                    });
                    this._client.endpoint = new AWS.Endpoint(endpoint);
                }
                return cb();
            });
    }
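setup() pins the SDK client to the bucket's actual region: a successful getBucketLocation yields LocationConstraint, and an AuthorizationHeaderMalformed rejection conveniently carries the region AWS expected in err.region. A reduced sketch of that decision (the client here is a stand-in that mimics the AWS SDK callback shape, and the us-east-1 fallback is an assumption of the sketch):

    // Sketch of the region-discovery logic above.
    function discoverRegion(client, bucket, cb) {
        client.getBucketLocation({ Bucket: bucket }, (err, res) => {
            if (err && err.code === 'AuthorizationHeaderMalformed') {
                // AWS rejects the request but names the region it expected
                return cb(null, err.region);
            }
            if (err) {
                return cb(err);
            }
            // an empty LocationConstraint conventionally means us-east-1
            return cb(null, res.LocationConstraint || 'us-east-1');
        });
    }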
    _createAwsKey(requestBucketName, requestObjectKey,

@@ -84,23 +84,23 @@ class AwsClient {
    put(stream, size, keyContext, reqUids, callback) {
        const awsKey = this._createAwsKey(keyContext.bucketName,
            keyContext.objectKey, this._bucketMatch);
        const metaHeaders = trimXMetaPrefix(keyContext.metaHeaders);
        const log = createLogger(reqUids);

        const putCb = (err, data) => {
            if (err) {
                logHelper(log, 'error', 'err from data backend',
                    err, this._dataStoreName, this.clientType);
                return callback(errors.ServiceUnavailable
                    .customizeDescription('Error returned from ' +
                    `${this.type}: ${err.message}`),
                );
            }
            if (!data.VersionId && this._supportsVersioning) {
                logHelper(log, 'error', 'missing version id for data ' +
                    'backend object', missingVerIdInternalError,
                    this._dataStoreName, this.clientType);
                return callback(missingVerIdInternalError);
            }
            const dataStoreVersionId = data.VersionId;
@@ -180,8 +180,8 @@ class AwsClient {
            Range: range ? `bytes=${range[0]}-${range[1]}` : null,
        }).on('success', response => {
            log.trace(`${this.type} GET request response headers`,
                { responseHeaders: response.httpResponse.headers,
                    backendType: this.clientType });
        });
        const stream = request.createReadStream();
@@ -202,8 +202,8 @@ class AwsClient {
                logLevel = 'error';
            }
            logHelper(log, logLevel,
                `error streaming data from ${this.type}`,
                err, this._dataStoreName, this.clientType);
        });
        // Always call the callback asynchronously: the caller may
        // destroy the stream with destroy(), which MUST be
@@ -232,8 +232,8 @@ class AwsClient {
                return callback();
            }
            return callback(errors.ServiceUnavailable
                .customizeDescription('Error returned from ' +
                `${this.type}: ${err.message}`),
            );
        }
        return callback();
@@ -243,39 +243,39 @@ class AwsClient {
    healthcheck(location, callback) {
        const awsResp = {};
        this._client.headBucket({ Bucket: this._awsBucketName },
            err => {
                /* eslint-disable no-param-reassign */
                if (err) {
                    awsResp[location] = { error: err, external: true };
                    return callback(null, awsResp);
                }
                if (!this._supportsVersioning) {
                    awsResp[location] = {
                        message: 'Congrats! You own the bucket',
                    };
                    return callback(null, awsResp);
                }
                return this._client.getBucketVersioning({
                    Bucket: this._awsBucketName },
                (err, data) => {
                    if (err) {
                        awsResp[location] = { error: err, external: true };
                    } else if (!data.Status ||
                        data.Status === 'Suspended') {
                        awsResp[location] = {
                            versioningStatus: data.Status,
                            error: 'Versioning must be enabled',
                            external: true,
                        };
                    } else {
                        awsResp[location] = {
                            versioningStatus: data.Status,
                            message: 'Congrats! You own the bucket',
                        };
                    }
                    return callback(null, awsResp);
                });
            });
    }
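Read as a contract, healthcheck() resolves to one of three shapes keyed by location: an external error when headBucket fails, a plain ownership message when the location does not require versioning, and a versioningStatus with an error when versioning is absent or suspended. Example values (illustrative, not captured output):

    // The three response shapes healthcheck() can produce for a location:
    const unreachable = {
        'aws-loc': { error: new Error('connect timeout'), external: true },
    };
    const healthy = {
        'aws-loc': {
            versioningStatus: 'Enabled',
            message: 'Congrats! You own the bucket',
        },
    };
    const misconfigured = {
        'aws-loc': {
            versioningStatus: 'Suspended',
            error: 'Versioning must be enabled',
            external: true,
        },
    };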
    createMPU(key, metaHeaders, bucketName, websiteRedirectHeader, contentType,

@@ -304,10 +304,10 @@ class AwsClient {
        return this._client.createMultipartUpload(params, (err, mpuResObj) => {
            if (err) {
                logHelper(log, 'error', 'err from data backend',
                    err, this._dataStoreName, this.clientType);
                return callback(errors.ServiceUnavailable
                    .customizeDescription('Error returned from ' +
                    `${this.type}: ${err.message}`),
                );
            }
            return callback(null, mpuResObj);
} }
uploadPart(request, streamingV4Params, stream, size, key, uploadId, uploadPart(request, streamingV4Params, stream, size, key, uploadId,
partNumber, bucketName, log, callback) { partNumber, bucketName, log, callback) {
let hashedStream = stream; let hashedStream = stream;
if (request) { if (request) {
const partStream = prepareStream(request, streamingV4Params, const partStream = prepareStream(request, streamingV4Params,
@@ -335,7 +335,7 @@ class AwsClient {
                    'on uploadPart', err, this._dataStoreName, this.clientType);
                return callback(errors.ServiceUnavailable
                    .customizeDescription('Error returned from ' +
                    `${this.type}: ${err.message}`),
                );
            }
            // Because we manually add quotes to ETag later, remove quotes here
} }
listParts(key, uploadId, bucketName, partNumberMarker, maxParts, log, listParts(key, uploadId, bucketName, partNumberMarker, maxParts, log,
callback) { callback) {
const awsBucket = this._awsBucketName; const awsBucket = this._awsBucketName;
const awsKey = this._createAwsKey(bucketName, key, this._bucketMatch); const awsKey = this._createAwsKey(bucketName, key, this._bucketMatch);
const params = { Bucket: awsBucket, Key: awsKey, UploadId: uploadId, const params = { Bucket: awsBucket, Key: awsKey, UploadId: uploadId,
@ -360,10 +360,10 @@ class AwsClient {
return this._client.listParts(params, (err, partList) => { return this._client.listParts(params, (err, partList) => {
if (err) { if (err) {
logHelper(log, 'error', 'err from data backend on listPart', logHelper(log, 'error', 'err from data backend on listPart',
err, this._dataStoreName, this.clientType); err, this._dataStoreName, this.clientType);
return callback(errors.ServiceUnavailable return callback(errors.ServiceUnavailable
.customizeDescription('Error returned from ' + .customizeDescription('Error returned from ' +
`${this.type}: ${err.message}`) `${this.type}: ${err.message}`),
); );
} }
// build storedParts object to mimic Scality S3 backend returns // build storedParts object to mimic Scality S3 backend returns
@@ -424,47 +424,47 @@ class AwsClient {
        };
        const completeObjData = { key: awsKey };
        return this._client.completeMultipartUpload(mpuParams,
            (err, completeMpuRes) => {
                if (err) {
                    if (mpuError[err.code]) {
                        logHelper(log, 'trace', 'err from data backend on ' +
                            'completeMPU', err, this._dataStoreName, this.clientType);
                        return callback(errors[err.code]);
                    }
                    logHelper(log, 'error', 'err from data backend on ' +
                        'completeMPU', err, this._dataStoreName, this.clientType);
                    return callback(errors.ServiceUnavailable
                        .customizeDescription('Error returned from ' +
                        `${this.type}: ${err.message}`),
                    );
                }
                if (!completeMpuRes.VersionId && this._supportsVersioning) {
                    logHelper(log, 'error', 'missing version id for data ' +
                        'backend object', missingVerIdInternalError,
                        this._dataStoreName, this.clientType);
                    return callback(missingVerIdInternalError);
                }
                // need to get content length of new object to store
                // in our metadata
                return this._client.headObject({ Bucket: awsBucket, Key: awsKey },
                    (err, objHeaders) => {
                        if (err) {
                            logHelper(log, 'trace', 'err from data backend on ' +
                                'headObject', err, this._dataStoreName, this.clientType);
                            return callback(errors.ServiceUnavailable
                                .customizeDescription('Error returned from ' +
                                `${this.type}: ${err.message}`),
                            );
                        }
                        // remove quotes from eTag because they're added later
                        completeObjData.eTag = completeMpuRes.ETag
                            .substring(1, completeMpuRes.ETag.length - 1);
                        completeObjData.dataStoreVersionId = completeMpuRes.VersionId;
                        completeObjData.contentLength =
                            Number.parseInt(objHeaders.ContentLength, 10);
                        return callback(null, completeObjData);
                    });
            });
    }

    abortMPU(key, uploadId, bucketName, log, callback) {
@@ -480,8 +480,8 @@ class AwsClient {
                    'using the same uploadId.', err, this._dataStoreName,
                    this.clientType);
                return callback(errors.ServiceUnavailable
                    .customizeDescription('Error returned from ' +
                    `${this.type}: ${err.message}`),
                );
            }
            return callback();
@@ -507,10 +507,10 @@ class AwsClient {
            if (err) {
                logHelper(log, 'error', 'error from data backend on ' +
                    'putObjectTagging', err,
                    this._dataStoreName, this.clientType);
                return callback(errors.ServiceUnavailable
                    .customizeDescription('Error returned from ' +
                    `${this.type}: ${err.message}`),
                );
            }
            return callback();
@@ -532,19 +532,19 @@ class AwsClient {
                    'deleteObjectTagging', err,
                    this._dataStoreName, this.clientType);
                return callback(errors.ServiceUnavailable
                    .customizeDescription('Error returned from ' +
                    `${this.type}: ${err.message}`),
                );
            }
            return callback();
        });
    }

    copyObject(request, destLocationConstraintName, sourceKey,
        sourceLocationConstraintName, storeMetadataParams, config, log, callback) {
        const destBucketName = request.bucketName;
        const destObjectKey = request.objectKey;
        const destAwsKey = this._createAwsKey(destBucketName, destObjectKey,
            this._bucketMatch);
        const sourceAwsBucketName =
            config.getAwsBucketName(sourceLocationConstraintName);
@@ -569,15 +569,15 @@ class AwsClient {
                    `${sourceAwsBucketName} ${this.type} bucket`, err,
                    this._dataStoreName, this.clientType);
                return callback(errors.AccessDenied
                    .customizeDescription('Error: Unable to access ' +
                    `${sourceAwsBucketName} ${this.type} bucket`),
                );
            }
            logHelper(log, 'error', 'error from data backend on ' +
                'copyObject', err, this._dataStoreName, this.clientType);
            return callback(errors.ServiceUnavailable
                .customizeDescription('Error returned from ' +
                `${this.type}: ${err.message}`),
            );
        }
        if (!copyResult.VersionId && this._supportsVersioning) {
@@ -590,7 +590,7 @@ class AwsClient {
            if (err || !data.VersionId) {
                logHelper(log, 'error', 'missing version id for data ' +
                    'backend object', missingVerIdInternalError,
                    this._dataStoreName, this.clientType);
                return callback(missingVerIdInternalError);
            }
            return callback(null, destAwsKey, data.VersionId);
@@ -600,11 +600,11 @@ class AwsClient {
        });
    }

    uploadPartCopy(request, awsSourceKey, sourceLocationConstraintName,
        config, log, callback) {
        const destBucketName = request.bucketName;
        const destObjectKey = request.objectKey;
        const destAwsKey = this._createAwsKey(destBucketName, destObjectKey,
            this._bucketMatch);
        const sourceAwsBucketName =
            config.getAwsBucketName(sourceLocationConstraintName);
@@ -628,15 +628,15 @@ class AwsClient {
                    `${sourceAwsBucketName} AWS bucket`, err,
                    this._dataStoreName, this.clientType);
                return callback(errors.AccessDenied
                    .customizeDescription('Error: Unable to access ' +
                    `${sourceAwsBucketName} AWS bucket`),
                );
            }
            logHelper(log, 'error', 'error from data backend on ' +
                'uploadPartCopy', err, this._dataStoreName, this.clientType);
            return callback(errors.ServiceUnavailable
                .customizeDescription('Error returned from ' +
                `${this.type}: ${err.message}`),
            );
        }
        const eTag = removeQuotes(res.CopyPartResult.ETag);
View File
@@ -51,7 +51,7 @@ class AzureClient {
            if (log) {
                log.error('error thrown by Azure Storage Client Library',
                    { error: err.message, stack: err.stack, s3Method,
                        azureMethod, dataStoreName: this._dataStoreName });
            }
            cb(error.customizeDescription('Error from Azure ' +
                `method: ${azureMethod} on ${s3Method} S3 call: ` +
@@ -82,7 +82,7 @@ class AzureClient {
    // same key name. If it does, do not allow put or delete because Azure
    // will delete all blocks with same key name
    protectAzureBlocks(metadata, bucketName, objectKey, dataStoreName,
        log, cb) {
        const mpuBucketName = `${constants.mpuBucketPrefix}${bucketName}`;
        const splitter = constants.splitter;
        const listingParams = {
@@ -93,23 +93,23 @@ class AzureClient {
        };

        return metadata.listMultipartUploads(mpuBucketName, listingParams,
            log, (err, mpuList) => {
                if (err && !err.NoSuchBucket) {
                    log.error('Error listing MPUs for Azure delete',
                        { error: err, dataStoreName });
                    return cb(errors.ServiceUnavailable);
                }
                if (mpuList && mpuList.Uploads && mpuList.Uploads.length > 0) {
                    const error = errors.MPUinProgress;
                    log.error('Error: cannot put/delete object to Azure with ' +
                        'same key name as ongoing MPU on Azure',
                        { error, dataStoreName });
                    return cb(error);
                }
                // If listMultipartUploads returns a NoSuchBucket error or the
                // mpu list is empty, there are no conflicting MPUs, so continue
                return cb();
            });
    }
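Because Azure commits blocks by blob name, a put or delete racing an ongoing multipart upload to the same key could invalidate that MPU's uncommitted blocks, so the guard above refuses the operation while any MPU on the key is listed. A sketch of the guard's contract (listMpus stands in for metadata.listMultipartUploads and the error shapes are simplified):

    // Sketch: refuse a put/delete while an MPU for the same key is in flight.
    function guardAgainstMpu(listMpus, objectKey, cb) {
        listMpus(objectKey, (err, mpuList) => {
            if (err && !err.noSuchBucket) {
                return cb(new Error('ServiceUnavailable'));
            }
            if (mpuList && mpuList.Uploads && mpuList.Uploads.length > 0) {
                return cb(new Error('MPUinProgress'));
            }
            // no conflicting MPU: safe to touch the blob
            return cb();
        });
    }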
    toObjectGetInfo(objectKey, bucketName) {

@@ -123,52 +123,52 @@ class AzureClient {
        const log = createLogger(reqUids);
        // before blob is put, make sure there is no ongoing MPU with same key
        this.protectAzureBlocks(metadata, keyContext.bucketName,
            keyContext.objectKey, this._dataStoreName, log, err => {
                // if error returned, there is ongoing MPU, so do not put
                if (err) {
                    return callback(err.customizeDescription(
                        `Error putting object to Azure: ${err.message}`));
                }
                const azureKey = this._createAzureKey(keyContext.bucketName,
                    keyContext.objectKey, this._bucketMatch);
                const options = {
                    metadata: translateAzureMetaHeaders(keyContext.metaHeaders,
                        keyContext.tagging),
                    contentSettings: {
                        contentType: keyContext.contentType || undefined,
                        cacheControl: keyContext.cacheControl || undefined,
                        contentDisposition: keyContext.contentDisposition ||
                            undefined,
                        contentEncoding: keyContext.contentEncoding || undefined,
                    },
                };
                if (size === 0) {
                    return this._errorWrapper('put', 'createBlockBlobFromText',
                        [this._azureContainerName, azureKey, '', options,
                            err => {
                                if (err) {
                                    logHelper(log, 'error', 'err from Azure PUT data ' +
                                        'backend', err, this._dataStoreName);
                                    return callback(errors.ServiceUnavailable
                                        .customizeDescription('Error returned from ' +
                                        `Azure: ${err.message}`));
                                }
                                return callback(null, azureKey);
                            }], log, callback);
                }
                return this._errorWrapper('put', 'createBlockBlobFromStream',
                    [this._azureContainerName, azureKey, stream, size, options,
                        err => {
                            if (err) {
                                logHelper(log, 'error', 'err from Azure PUT data ' +
                                    'backend', err, this._dataStoreName);
                                return callback(errors.ServiceUnavailable
                                    .customizeDescription('Error returned from ' +
                                    `Azure: ${err.message}`));
                            }
                            return callback(null, azureKey);
                        }], log, callback);
            });
    }

    head(objectGetInfo, reqUids, callback) {
@@ -176,25 +176,25 @@ class AzureClient {
        const { key, azureStreamingOptions } = objectGetInfo;
        return this._errorWrapper('head', 'getBlobProperties',
            [this._azureContainerName, key, azureStreamingOptions,
                (err, data) => {
                    if (err) {
                        let logLevel;
                        let retError;
                        if (err.code === 'NotFound') {
                            logLevel = 'info';
                            retError = errors.LocationNotFound;
                        } else {
                            logLevel = 'error';
                            retError = errors.ServiceUnavailable
                                .customizeDescription(
                                    `Error returned from Azure: ${err.message}`);
                        }
                        logHelper(log, logLevel, 'err from Azure HEAD data backend',
                            err, this._dataStoreName);
                        return callback(retError);
                    }
                    return callback(null, data);
                }], log, callback);
    }

    get(objectGetInfo, range, reqUids, callback) {
@@ -213,14 +213,14 @@ class AzureClient {
        }
        this._errorWrapper('get', 'getBlobToStream',
            [this._azureContainerName, key, response, streamingOptions,
                err => {
                    if (err) {
                        logHelper(log, 'error', 'err from Azure GET data backend',
                            err, this._dataStoreName);
                        return callback(errors.ServiceUnavailable);
                    }
                    return callback(null, response);
                }], log, callback);
    }

    delete(objectGetInfo, reqUids, callback) {
@@ -234,20 +234,20 @@ class AzureClient {
        }
        return this._errorWrapper('delete', 'deleteBlobIfExists',
            [this._azureContainerName, key, options,
                err => {
                    if (err && err.statusCode === 412) {
                        return callback(errors.PreconditionFailed);
                    }
                    if (err) {
                        const log = createLogger(reqUids);
                        logHelper(log, 'error', 'error deleting object from ' +
                            'Azure datastore', err, this._dataStoreName);
                        return callback(errors.ServiceUnavailable
                            .customizeDescription('Error returned from ' +
                            `Azure: ${err.message}`));
                    }
                    return callback();
                }], log, callback);
    }

    healthcheck(location, callback, flightCheckOnStartUp) {
@@ -271,7 +271,7 @@ class AzureClient {
    }

    uploadPart(request, streamingV4Params, partStream, size, key, uploadId,
        partNumber, bucket, log, callback) {
        const azureKey = this._createAzureKey(bucket, key, this._bucketMatch);
        const params = { bucketName: this._azureContainerName,
            partNumber, size, objectKey: azureKey, uploadId };
@@ -299,27 +299,27 @@ class AzureClient {
        if (size <= azureMpuUtils.maxSubPartSize) {
            const errorWrapperFn = this._errorWrapper.bind(this);
            return azureMpuUtils.putSinglePart(errorWrapperFn,
                stream, params, this._dataStoreName, log, (err, dataStoreETag) => {
                    if (err) {
                        return callback(err);
                    }
                    dataRetrievalInfo.dataStoreETag = dataStoreETag;
                    return callback(null, dataRetrievalInfo);
                });
        }
        const errorWrapperFn = this._errorWrapper.bind(this);
        return azureMpuUtils.putSubParts(errorWrapperFn, stream,
            params, this._dataStoreName, log, (err, dataStoreETag) => {
                if (err) {
                    return callback(err);
                }
                dataRetrievalInfo.dataStoreETag = dataStoreETag;
                return callback(null, dataRetrievalInfo);
            });
    }
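Azure caps the size of a single uploaded block, so uploadPart picks putSinglePart when the part fits under azureMpuUtils.maxSubPartSize and otherwise fans out into sub-parts. The split itself is plain arithmetic; a sketch (the 100 MB threshold is illustrative, the real bound lives in azureMpuUtils):

    // Sketch: split an S3 part into Azure-sized blocks.
    const MAX_SUBPART_SIZE = 100 * 1024 * 1024; // illustrative threshold

    function subPartSizes(size) {
        if (size <= MAX_SUBPART_SIZE) {
            return [size]; // a single block suffices
        }
        const sizes = [];
        let remaining = size;
        while (remaining > 0) {
            sizes.push(Math.min(remaining, MAX_SUBPART_SIZE));
            remaining -= MAX_SUBPART_SIZE;
        }
        return sizes;
    }

    console.log(subPartSizes(250 * 1024 * 1024)); // three sub-parts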
    completeMPU(jsonList, mdInfo, key, uploadId, bucket, metaHeaders,
        contentSettings, tagging, log, callback) {
        const azureKey = this._createAzureKey(bucket, key, this._bucketMatch);
        const commitList = {
            UncommittedBlocks: jsonList.uncommittedBlocks || [],

@@ -345,20 +345,20 @@ class AzureClient {
        };
        return this._errorWrapper('completeMPU', 'commitBlocks',
            [this._azureContainerName, azureKey, commitList, options,
                err => {
                    if (err) {
                        logHelper(log, 'error', 'err completing MPU on Azure ' +
                            'datastore', err, this._dataStoreName);
                        return callback(errors.ServiceUnavailable
                            .customizeDescription('Error returned from ' +
                            `Azure: ${err.message}`));
                    }
                    const completeObjData = {
                        key: azureKey,
                        filteredPartsObj,
                    };
                    return callback(null, completeObjData);
                }], log, callback);
    }

    objectPutTagging(key, bucket, objectMD, log, callback) {
@@ -367,14 +367,14 @@ class AzureClient {
        azureMD.tags = JSON.stringify(objectMD.tags);
        this._errorWrapper('objectPutTagging', 'setBlobMetadata',
            [this._azureContainerName, azureKey, azureMD,
                err => {
                    if (err) {
                        logHelper(log, 'error', 'err putting object tags to ' +
                            'Azure backend', err, this._dataStoreName);
                        return callback(errors.ServiceUnavailable);
                    }
                    return callback();
                }], log, callback);
    }

    objectDeleteTagging(key, bucketName, objectMD, log, callback) {
@@ -382,27 +382,27 @@ class AzureClient {
        const azureMD = this._getMetaHeaders(objectMD);
        this._errorWrapper('objectDeleteTagging', 'setBlobMetadata',
            [this._azureContainerName, azureKey, azureMD,
                err => {
                    if (err) {
                        logHelper(log, 'error', 'err putting object tags to ' +
                            'Azure backend', err, this._dataStoreName);
                        return callback(errors.ServiceUnavailable);
                    }
                    return callback();
                }], log, callback);
    }

    copyObject(request, destLocationConstraintName, sourceKey,
        sourceLocationConstraintName, storeMetadataParams, config, log, callback) {
        const destContainerName = request.bucketName;
        const destObjectKey = request.objectKey;
        const destAzureKey = this._createAzureKey(destContainerName,
            destObjectKey, this._bucketMatch);

        const sourceContainerName =
            config.locationConstraints[sourceLocationConstraintName]
                .details.azureContainerName;

        let options;
        if (storeMetadataParams.metaHeaders) {
@@ -413,7 +413,7 @@ class AzureClient {
        this._errorWrapper('copyObject', 'startCopyBlob',
            [`${this._azureStorageEndpoint}` +
            `${sourceContainerName}/${sourceKey}`,
            this._azureContainerName, destAzureKey, options,
            (err, res) => {
                if (err) {
                    if (err.code === 'CannotVerifyCopySource') {

@@ -421,36 +421,36 @@ class AzureClient {
                        `${sourceContainerName} Azure Container`, err,
                        this._dataStoreName);
                        return callback(errors.AccessDenied
                            .customizeDescription('Error: Unable to access ' +
                            `${sourceContainerName} Azure Container`),
                        );
                    }
                    logHelper(log, 'error', 'error from data backend on ' +
                        'copyObject', err, this._dataStoreName);
                    return callback(errors.ServiceUnavailable
                        .customizeDescription('Error returned from ' +
                        `AWS: ${err.message}`),
                    );
                }
                if (res.copy.status === 'pending') {
                    logHelper(log, 'error', 'Azure copy status is pending',
                        err, this._dataStoreName);
                    const copyId = res.copy.id;
                    this._client.abortCopyBlob(this._azureContainerName,
                        destAzureKey, copyId, err => {
                            if (err) {
                                logHelper(log, 'error', 'error from data backend ' +
                                    'on abortCopyBlob', err, this._dataStoreName);
                                return callback(errors.ServiceUnavailable
                                    .customizeDescription('Error returned from ' +
                                    `AWS on abortCopyBlob: ${err.message}`),
                                );
                            }
                            return callback(errors.InvalidObjectState
                                .customizeDescription('Error: Azure copy status was ' +
                                'pending. It has been aborted successfully'),
                            );
                        });
                }
                return callback(null, destAzureKey);
            }], log, callback);
View File
@@ -24,7 +24,7 @@ class MpuHelper {
        const handleFunc = (fnName, params, retry, callback) => {
            const timeout = backoff.duration();
            return setTimeout((params, cb) =>
                this.service[fnName](params, cb), timeout, params,
            (err, res) => {
                if (err) {
                    if (err.statusCode === 429 || err.code === 429) {
@@ -90,53 +90,53 @@ class MpuHelper {
    splitMerge(params, partList, level, callback) {
        // create composition of slices from the partList array
        return async.mapLimit(eachSlice.call(partList, 32),
            this.service._maxConcurrent,
            (infoParts, cb) => {
                const mpuPartList = infoParts.Parts.map(item =>
                    ({ PartName: item.PartName }));
                const partNumber = infoParts.PartNumber;
                const tmpKey =
                    createMpuKey(params.Key, params.UploadId, partNumber, level);
                const mergedObject = { PartName: tmpKey };
                if (mpuPartList.length < 2) {
                    logger.trace(
                        'splitMerge: parts are fewer than 2, copy instead');
                    // else just perform a copy
                    const copyParams = {
                        Bucket: params.MPU,
                        Key: tmpKey,
                        CopySource: `${params.MPU}/${mpuPartList[0].PartName}`,
                    };
                    return this.service.copyObject(copyParams, (err, res) => {
                        if (err) {
                            logHelper(logger, 'error',
                                'error in splitMerge - copyObject', err);
                            return cb(err);
                        }
                        mergedObject.VersionId = res.VersionId;
                        mergedObject.ETag = res.ETag;
                        return cb(null, mergedObject);
                    });
                }
                const composeParams = {
                    Bucket: params.MPU,
                    Key: tmpKey,
                    MultipartUpload: { Parts: mpuPartList },
                };
                return this.retryCompose(composeParams, (err, res) => {
                    if (err) {
                        return cb(err);
                    }
                    mergedObject.VersionId = res.VersionId;
                    mergedObject.ETag = res.ETag;
                    return cb(null, mergedObject);
                });
            }, (err, res) => {
                if (err) {
                    return callback(err);
                }
                return callback(null, res.length);
            });
    }
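GCP's compose operation accepts at most 32 source objects, which is why splitMerge walks the part list in slices of 32, composing each slice into an intermediate object and falling back to a copy when a slice holds a single part. The slicing step on its own (written out directly; eachSlice above does the same job):

    // Sketch: batch part names into compose groups of at most 32.
    const GCP_COMPOSE_LIMIT = 32;

    function sliceParts(partNames) {
        const slices = [];
        for (let i = 0; i < partNames.length; i += GCP_COMPOSE_LIMIT) {
            slices.push(partNames.slice(i, i + GCP_COMPOSE_LIMIT));
        }
        return slices;
    }

    // 70 parts -> 3 compose calls (32 + 32 + 6)
    console.log(sliceParts([...Array(70).keys()]).length); // 3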
    /**

@@ -284,7 +284,7 @@ class MpuHelper {
            if (err) {
                logHelper(logger, 'error', 'error in ' +
                    'createMultipartUpload - final copyObject',
                    err);
                return next(err);
            }
            const mpuResult = {
@@ -301,7 +301,7 @@ class MpuHelper {
            if (err) {
                logHelper(logger, 'error', 'error in ' +
                    'createMultipartUpload - final head object',
                    err);
                return next(err);
            }
            mpuResult.ContentLength = res.ContentLength;
View File
@@ -70,7 +70,7 @@ class GcpManagedUpload {
        if (this.body instanceof stream) {
            assert.strictEqual(typeof this.totalBytes, 'number',
                errors.MissingContentLength.customizeDescription(
                    'If body is a stream, ContentLength must be provided'));
        } else {
            if (typeof this.body === 'string') {
                this.body = Buffer.from(this.body);
@@ -156,13 +156,13 @@ class GcpManagedUpload {
            .map(item =>
                Object.assign(item, { ETag: this.parts[item.PartNumber] }));
        return this.service.completeMultipartUpload(params,
            (err, res) => {
                if (err) {
                    return this.cleanUp(err);
                }
                this.completed = true;
                return this.callback(null, res);
            });
    }

    /**
@@ -187,16 +187,16 @@ class GcpManagedUpload {
        if (this.body instanceof stream) {
            // stream type
            this.body.on('error', err => this.cleanUp(err))
                .on('readable', () => this.chunkStream())
                .on('end', () => {
                    this.isDoneChunking = true;
                    this.chunkStream();
                    if (this.isDoneChunking && this.uploadedParts >= 1 &&
                        this.uploadedParts === this.totalParts) {
                        this.completeUpload();
                    }
                });
        }
        return undefined;
    }
@@ -251,22 +251,22 @@ class GcpManagedUpload {
                });
            },
            next => async.eachLimit(this.slicedParts, this.queueSize,
                (uploadPart, done) => {
                    const params = {
                        Bucket: this.mpuBucket,
                        Key: this.params.Key,
                        UploadId: this.uploadId,
                        Body: uploadPart.Body,
                        PartNumber: uploadPart.PartNumber,
                    };
                    this.service.uploadPart(params, (err, res) => {
                        if (!err) {
                            this.parts[uploadPart.PartNumber] = res.ETag;
                            this.uploadedParts++;
                        }
                        return done(err);
                    });
                }, next),
        ], err => {
            if (err) {
                return this.cleanUp(new Error(
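The part uploads above run through async.eachLimit, which keeps at most queueSize calls in flight and fails fast on the first error, while each success records its ETag for the final completeMultipartUpload. A self-contained sketch of the same concurrency shape (the uploader is simulated):

    const async = require('async');

    // Sketch: bounded-concurrency part uploads, as in uploadParts() above.
    const parts = [1, 2, 3, 4, 5];
    const etags = {};
    const queueSize = 2; // cap on in-flight uploads

    function fakeUpload(partNumber, cb) {
        setImmediate(() => cb(null, { ETag: `"etag-${partNumber}"` }));
    }

    async.eachLimit(parts, queueSize, (partNumber, done) => {
        fakeUpload(partNumber, (err, res) => {
            if (!err) {
                etags[partNumber] = res.ETag;
            }
            return done(err);
        });
    }, err => {
        if (err) {
            throw err;
        }
        console.log(etags); // ETags keyed by part number
    });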
View File
@@ -239,7 +239,7 @@ module.exports = {
        const requestId = resp.httpResponse.headers ?
            resp.httpResponse.headers['x-guploader-uploadid'] : null;
        if (resp.error) {
            // eslint-disable-next-line no-param-reassign
            resp.error.requestId = resp.requestId || requestId;
        }
    },
View File
@@ -120,10 +120,10 @@ class GcpClient extends AwsClient {
        return this._client.createMultipartUpload(params, (err, mpuResObj) => {
            if (err) {
                logHelper(log, 'error', 'err from data backend',
                    err, this._dataStoreName, this.clientType);
                return callback(errors.ServiceUnavailable
                    .customizeDescription('Error returned from ' +
                    `GCP: ${err.message}`),
                );
            }
            return callback(null, mpuResObj);
@@ -162,32 +162,32 @@ class GcpClient extends AwsClient {
        };
        const completeObjData = { key: gcpKey };
        return this._client.completeMultipartUpload(mpuParams,
            (err, completeMpuRes) => {
                if (err) {
                    logHelper(log, 'error', 'err from data backend on ' +
                        'completeMPU', err, this._dataStoreName, this.clientType);
                    return callback(errors.ServiceUnavailable
                        .customizeDescription('Error returned from ' +
                        `GCP: ${err.message}`),
                    );
                }
                if (!completeMpuRes.VersionId) {
                    logHelper(log, 'error', 'missing version id for data ' +
                        'backend object', missingVerIdInternalError,
                        this._dataStoreName, this.clientType);
                    return callback(missingVerIdInternalError);
                }
                // remove quotes from eTag because they're added later
                completeObjData.eTag = removeQuotes(completeMpuRes.ETag);
                completeObjData.dataStoreVersionId = completeMpuRes.VersionId;
                completeObjData.contentLength =
                    Number.parseInt(completeMpuRes.ContentLength, 10);
                return callback(null, completeObjData);
            });
    }

    uploadPart(request, streamingV4Params, stream, size, key, uploadId,
        partNumber, bucketName, log, callback) {
        let hashedStream = stream;
        if (request) {
            const partStream = prepareStream(request, streamingV4Params,
@ -209,8 +209,8 @@ class GcpClient extends AwsClient {
logHelper(log, 'error', 'err from data backend ' + logHelper(log, 'error', 'err from data backend ' +
'on uploadPart', err, this._dataStoreName, this.clientType); 'on uploadPart', err, this._dataStoreName, this.clientType);
return callback(errors.ServiceUnavailable return callback(errors.ServiceUnavailable
.customizeDescription('Error returned from ' + .customizeDescription('Error returned from ' +
`GCP: ${err.message}`) `GCP: ${err.message}`),
); );
} }
// remove quotes from eTag because they're added later // remove quotes from eTag because they're added later
@ -226,11 +226,11 @@ class GcpClient extends AwsClient {
} }
uploadPartCopy(request, gcpSourceKey, sourceLocationConstraintName, config, uploadPartCopy(request, gcpSourceKey, sourceLocationConstraintName, config,
log, callback) { log, callback) {
const destBucketName = request.bucketName; const destBucketName = request.bucketName;
const destObjectKey = request.objectKey; const destObjectKey = request.objectKey;
const destGcpKey = this._createGcpKey(destBucketName, destObjectKey, const destGcpKey = this._createGcpKey(destBucketName, destObjectKey,
this._bucketMatch); this._bucketMatch);
const sourceGcpBucketName = const sourceGcpBucketName =
config.getGcpBucketNames(sourceLocationConstraintName).bucketName; config.getGcpBucketNames(sourceLocationConstraintName).bucketName;
@ -241,8 +241,8 @@ class GcpClient extends AwsClient {
if (copySourceRange) { if (copySourceRange) {
return callback(errors.NotImplemented return callback(errors.NotImplemented
.customizeDescription('Error returned from ' + .customizeDescription('Error returned from ' +
`${this.clientType}: copySourceRange not implemented`) `${this.clientType}: copySourceRange not implemented`),
); );
} }
@ -260,15 +260,15 @@ class GcpClient extends AwsClient {
`${sourceGcpBucketName} GCP bucket`, err, `${sourceGcpBucketName} GCP bucket`, err,
this._dataStoreName, this.clientType); this._dataStoreName, this.clientType);
return callback(errors.AccessDenied return callback(errors.AccessDenied
.customizeDescription('Error: Unable to access ' + .customizeDescription('Error: Unable to access ' +
`${sourceGcpBucketName} GCP bucket`) `${sourceGcpBucketName} GCP bucket`),
); );
} }
logHelper(log, 'error', 'error from data backend on ' + logHelper(log, 'error', 'error from data backend on ' +
'uploadPartCopy', err, this._dataStoreName); 'uploadPartCopy', err, this._dataStoreName);
return callback(errors.ServiceUnavailable return callback(errors.ServiceUnavailable
.customizeDescription('Error returned from ' + .customizeDescription('Error returned from ' +
`GCP: ${err.message}`) `GCP: ${err.message}`),
); );
} }
// remove quotes from eTag because they're added later // remove quotes from eTag because they're added later
@ -290,8 +290,8 @@ class GcpClient extends AwsClient {
logHelper(log, 'error', 'err from data backend ' + logHelper(log, 'error', 'err from data backend ' +
'on abortMPU', err, this._dataStoreName, this.clientType); 'on abortMPU', err, this._dataStoreName, this.clientType);
return callback(errors.ServiceUnavailable return callback(errors.ServiceUnavailable
.customizeDescription('Error returned from ' + .customizeDescription('Error returned from ' +
`GCP: ${err.message}`) `GCP: ${err.message}`),
); );
} }
return callback(); return callback();

View File

@@ -45,7 +45,7 @@ class PfsClient {
            }
            return callback(null, keyContext.objectKey, '',
                keyContext.metaHeaders['x-amz-meta-size'],
-               md5
+               md5,
            );
        }
        logHelper(log, 'error', 'Not implemented', errors.NotImplemented,
@@ -72,7 +72,7 @@ class PfsClient {
        this._restClient.delete(key, reqUids, err => {
            if (err) {
                logHelper(log, 'error', 'err from data backend', err,
                    this._dataStoreName, this.clientType);
                return callback(err);
            }
            return callback();
@@ -86,61 +86,61 @@ class PfsClient {
    }

    createMPU(key, metaHeaders, bucketName, websiteRedirectHeader, contentType,
        cacheControl, contentDisposition, contentEncoding, log, callback) {
        logHelper(log, 'error', 'Not implemented', errors.NotImplemented,
            this._dataStoreName, this.clientType);
        return callback(errors.NotImplemented);
    }

    uploadPart(request, streamingV4Params, stream, size, key, uploadId,
        partNumber, bucketName, log, callback) {
        logHelper(log, 'error', 'Not implemented', errors.NotImplemented,
            this._dataStoreName, this.clientType);
        return callback(errors.NotImplemented);
    }

    listParts(key, uploadId, bucketName, partNumberMarker, maxParts, log,
        callback) {
        logHelper(log, 'error', 'Not implemented', errors.NotImplemented,
            this._dataStoreName, this.clientType);
        return callback(errors.NotImplemented);
    }

    completeMPU(jsonList, mdInfo, key, uploadId, bucketName, log, callback) {
        logHelper(log, 'error', 'Not implemented', errors.NotImplemented,
            this._dataStoreName, this.clientType);
        return callback(errors.NotImplemented);
    }

    abortMPU(key, uploadId, bucketName, log, callback) {
        logHelper(log, 'error', 'Not implemented', errors.NotImplemented,
            this._dataStoreName, this.clientType);
        return callback(errors.NotImplemented);
    }

    objectPutTagging(key, bucket, objectMD, log, callback) {
        logHelper(log, 'error', 'Not implemented', errors.NotImplemented,
            this._dataStoreName, this.clientType);
        return callback(errors.NotImplemented);
    }

    objectDeleteTagging(key, bucketName, objectMD, log, callback) {
        logHelper(log, 'error', 'Not implemented', errors.NotImplemented,
            this._dataStoreName, this.clientType);
        return callback(errors.NotImplemented);
    }

    copyObject(request, destLocationConstraintName, sourceKey,
        sourceLocationConstraintName, storeMetadataParams, config, log, callback) {
        logHelper(log, 'error', 'Not implemented', errors.NotImplemented,
            this._dataStoreName, this.clientType);
        return callback(errors.NotImplemented);
    }

    uploadPartCopy(request, awsSourceKey, sourceLocationConstraintName,
        config, log, callback) {
        logHelper(log, 'error', 'Not implemented', errors.NotImplemented,
            this._dataStoreName, this.clientType);
        return callback(errors.NotImplemented);
    }
}

View File

@@ -94,7 +94,7 @@ const utils = {
     * same account since Azure copy outside of an account is async
     */
    externalBackendCopy(config, locationConstraintSrc, locationConstraintDest,
        sourceBucketMD, destBucketMD) {
        const sourceBucketName = sourceBucketMD.getName();
        const destBucketName = destBucketMD.getName();
        const isSameBucket = sourceBucketName === destBucketName;
@@ -111,11 +111,11 @@ const utils = {
            sourceLocationConstraintType === 'gcp' ||
            (sourceLocationConstraintType === 'azure' &&
            config.isSameAzureAccount(locationConstraintSrc,
                locationConstraintDest)));
    },

    checkExternalBackend(clients, locations, type, flightCheckOnStartUp,
        externalBackendHealthCheckInterval, cb) {
        const checkStatus = backendHealth[type] || {};
        if (locations.length === 0) {
            return process.nextTick(cb, null, []);

View File

@@ -35,7 +35,6 @@ const FOLDER_HASH = 3511;
 * directory hash structure under the configured dataPath.
 */
class DataFileStore {
-
    /**
     * @constructor
     * @param {Object} dataConfig - configuration of the file backend
@@ -78,7 +77,7 @@ class DataFileStore {
        fs.access(this.dataPath, fs.F_OK | fs.R_OK | fs.W_OK, err => {
            if (err) {
                this.logger.error('Data path is not readable or writable',
                    { error: err });
                return callback(err);
            }
            if (this.isPassthrough) {
@@ -86,7 +85,7 @@ class DataFileStore {
            }
            // Create FOLDER_HASH subdirectories
            const subDirs = Array.from({ length: FOLDER_HASH },
                (v, k) => (k).toString());
            this.logger.info(`pre-creating ${subDirs.length} subdirs...`);
            if (!this.noSync) {
                storageUtils.setDirSyncFlag(this.dataPath, this.logger);
@@ -103,7 +102,7 @@ class DataFileStore {
                err => {
                    if (err) {
                        this.logger.error('Error creating subdirs',
                            { error: err });
                        return callback(err);
                    }
                    this.logger.info('data file store init complete, ' +
@@ -167,7 +166,7 @@ class DataFileStore {
        fs.open(filePath, 'wx', (err, fd) => {
            if (err) {
                log.error('error opening filePath',
                    { method: 'put', key, filePath, error: err });
                return callback(errors.InternalError.customizeDescription(
                    `filesystem error: open() returned ${err.code}`));
            }
@@ -181,7 +180,7 @@ class DataFileStore {
            fileStream.on('finish', () => {
                function ok() {
                    log.debug('finished writing data',
                        { method: 'put', key, filePath });
                    return cbOnce(null, key);
                }
                if (this.noSync) {
@@ -243,7 +242,7 @@ class DataFileStore {
                return undefined;
            }).on('error', err => {
                log.error('error streaming data on write',
                    { method: 'put', key, filePath, error: err });
                // destroying the write stream forces a close(fd)
                fileStream.destroy();
                return cbOnce(errors.InternalError.customizeDescription(
@@ -262,7 +261,7 @@ class DataFileStore {
            dataStream.on('close', () => {
                // this means the underlying socket has been closed
                log.debug('Client closed socket while streaming',
                    { method: 'put', key, filePath });
                // destroying the write stream forces a close(fd)
                fileStream.destroy();
                // we need to unlink the file ourselves
@@ -294,7 +293,7 @@ class DataFileStore {
                    return callback(errors.ObjNotFound);
                }
                log.error('error on \'stat\' of file',
                    { key, filePath, error: err });
                return callback(errors.InternalError.customizeDescription(
                    `filesystem error: stat() returned ${err.code}`));
            }
@@ -330,34 +329,34 @@ class DataFileStore {
            readStreamOptions.end = byteRange[1];
        }
        log.debug('opening readStream to get data',
            { method: 'get', key, filePath, byteRange });
        const cbOnce = jsutil.once(callback);
        const rs = fs.createReadStream(filePath, readStreamOptions)
            .on('error', err => {
                if (err.code === 'ENOENT') {
                    return cbOnce(errors.ObjNotFound);
                }
                log.error('error retrieving file',
                    { method: 'DataFileStore.get', key, filePath,
                        error: err });
                return cbOnce(
                    errors.InternalError.customizeDescription(
                        `filesystem read error: ${err.code}`));
            })
            .on('open', () => { cbOnce(null, rs); })
            .on('end', () => {
                if (this.noCache) {
                    releasePageCacheSync(filePath, rs.fd, log);
                }
                fs.close(rs.fd, err => {
                    if (err) {
                        log.error('unable to close file descriptor', {
                            method: 'DataFileStore.get', key, filePath,
                            error: err,
                        });
                    }
                });
            });
    }

    /**

View File

@@ -12,7 +12,7 @@ function releasePageCacheSync(filePath, fd, log) {
    const ret = posixFadvise(fd, 0, 0, 4);
    if (ret !== 0) {
        log.warning(
            `error fadv_dontneed ${filePath} returned ${ret}`);
    }
}

View File

@@ -37,16 +37,16 @@ const backend = {
            }
            cursor += data.length;
        })
        .on('end', () => {
            if (exceeded) {
                log.error('data stream exceed announced size',
                    { size, overflow: cursor });
                callback(errors.InternalError);
            } else {
                ds[count] = { value, keyContext };
                callback(null, count++);
            }
        });
    },

    get: function getMem(objectGetInfo, range, reqUids, callback) {

View File

@@ -181,25 +181,25 @@ class MetadataWrapper {
        const value = typeof objVal.getValue === 'function' ?
            objVal.getValue() : objVal;
        this.client.putObject(bucketName, objName, value, params, log,
            (err, data) => {
                if (err) {
                    log.debug('error from metadata', { implName: this.implName,
                        error: err });
                    return cb(err);
                }
                if (data) {
                    log.debug('object version successfully put in metadata',
                        { version: data });
                } else {
                    log.debug('object successfully put in metadata');
                }
                return cb(err, data);
            });
    }

    getBucketAndObjectMD(bucketName, objName, params, log, cb) {
        log.debug('getting bucket and object from metadata',
            { database: bucketName, object: objName });
        this.client.getBucketAndObject(bucketName, objName, params, log,
            (err, data) => {
                if (err) {
@@ -208,7 +208,7 @@ class MetadataWrapper {
                    return cb(err);
                }
                log.debug('bucket and object retrieved from metadata',
                    { database: bucketName, object: objName });
                return cb(err, data);
            });
    }

View File

@@ -5,7 +5,7 @@ const BucketInfo = require('../../../models/BucketInfo');
class BucketClientInterface {
    constructor(params, bucketclient, logger) {
        assert(params.bucketdBootstrap.length > 0,
            'bucketd bootstrap list is empty');
        const bootstrap = params.bucketdBootstrap;
        const log = params.bucketdLog;
        if (params.https) {
@@ -29,7 +29,7 @@ class BucketClientInterface {
    createBucket(bucketName, bucketMD, log, cb) {
        this.client.createBucket(bucketName, log.getSerializedUids(),
            bucketMD.serialize(), cb);
        return null;
    }
@@ -57,17 +57,17 @@ class BucketClientInterface {
    getRaftBuckets(raftId, log, cb) {
        return this.client.getRaftBuckets(raftId, log.getSerializedUids(),
            (err, data) => {
                if (err) {
                    return cb(err);
                }
                return cb(null, JSON.parse(data));
            });
    }

    putBucketAttributes(bucketName, bucketMD, log, cb) {
        this.client.putBucketAttributes(bucketName, log.getSerializedUids(),
            bucketMD.serialize(), cb);
        return null;
    }
@@ -95,7 +95,7 @@ class BucketClientInterface {
    deleteObject(bucketName, objName, params, log, cb) {
        this.client.deleteObject(bucketName, objName, log.getSerializedUids(),
            cb, params);
        return null;
    }
@@ -183,8 +183,8 @@ class BucketClientInterface {
        reason.msg = undefined;
        respBody[implName] = {
            code: 200,
            message, // Provide interpreted reason msg
            body: reason, // Provide analysis data
        };
        if (failure) {
            // Setting the `error` field is how the healthCheck

View File

@@ -30,7 +30,6 @@ class ListRecordStream extends stream.Transform {
 * @classdesc Proxy object to access raft log API
 */
class LogConsumer {
-
    /**
     * @constructor
     *
@@ -97,14 +96,14 @@ class LogConsumer {
            if (err.code === 404) {
                // no such raft session, log and ignore
                this.logger.warn('raft session does not exist yet',
                    { raftId: this.raftSession });
                return cbOnce(null, { info: { start: null,
                    end: null } });
            }
            if (err.code === 416) {
                // requested range not satisfiable
                this.logger.debug('no new log record to process',
                    { raftId: this.raftSession });
                return cbOnce(null, { info: { start: null,
                    end: null } });
            }
@@ -116,7 +115,7 @@ class LogConsumer {
        // is emitted
        recordStream.on('error', err => {
            this.logger.error('error receiving raft log',
                { error: err.message });
            return cbOnce(errors.InternalError);
        });
        const jsonResponse = stream.pipe(jsonStream.parse('log.*'));
@@ -127,7 +126,7 @@ class LogConsumer {
            // remove temporary listener
            recordStream.removeAllListeners('error');
            return cbOnce(null, { info: header.info,
                log: recordStream });
        })
        .on('error', err => recordStream.emit('error', err));
        return undefined;

View File

@@ -14,13 +14,13 @@ const _operatorType1 = joi.string().valid(
    '$gt',
    '$gte',
    '$lt',
-   '$lte'
+   '$lte',
);

// supports strings, numbers, and boolean
const _operatorType2 = joi.string().valid(
    '$eq',
-   '$ne'
+   '$ne',
);

const _valueType1 = joi.alternatives([

View File

@@ -10,14 +10,13 @@ const list = require('../../../algos/list/exportAlgos');
const MetadataFileClient = require('./MetadataFileClient');
const versionSep =
    require('../../../versioning/constants')
        .VersioningConstants.VersionId.Separator;

const METASTORE = '__metastore';

const itemScanRefreshDelay = 1000 * 30 * 60; // 30 minutes

class BucketFileInterface {
-
    /**
     * @constructor
     * @param {object} [params] - constructor params
@@ -65,7 +64,7 @@ class BucketFileInterface {
                if (err) {
                    this.logger.fatal('error writing usersBucket ' +
                        'attributes to metadata',
                        { error: err });
                    throw (errors.InternalError);
                }
                return done();
@@ -104,8 +103,8 @@ class BucketFileInterface {
            }
            this.lastItemScanTime = null;
            this.putBucketAttributes(bucketName,
                bucketMD,
                log, cb);
            return undefined;
        });
    }
@@ -191,7 +190,7 @@ class BucketFileInterface {
                    errorStack: err.stack,
                };
                log.error('error deleting bucket',
                    logObj);
                return cb(errors.InternalError);
            }
            this.lastItemScanTime = null;
@@ -392,16 +391,16 @@ class BucketFileInterface {
            cbDone = true;
            async.eachSeries(res.bucketList, (bucket, cb) => {
                this.getBucketAttributes(bucket.name, log,
                    (err, bucketInfo) => {
                        if (err) {
                            return cb(err);
                        }
                        /* eslint-disable no-param-reassign */
                        bucket.location =
                            bucketInfo.getLocationConstraint();
                        /* eslint-enable no-param-reassign */
                        return cb();
                    });
            }, err => {
                if (!err) {
                    this.lastItemScanTime = Date.now();

View File

@@ -8,7 +8,6 @@ const { RecordLogProxy } = require('./RecordLog.js');
const werelogs = require('werelogs');

class MetadataFileClient {
-
    /**
     * Construct a metadata client
     *
@@ -86,7 +85,7 @@ class MetadataFileClient {
        logProxy.connect(err => {
            if (err) {
                this.logger.error('error connecting to record log service',
                    { url, error: err.stack });
                return done(err);
            }
            this.logger.info('connected to record log service', { url });

View File

@@ -25,7 +25,6 @@ const SYNC_OPTIONS = { sync: true };
const SUBLEVEL_SEP = '::';

class MetadataFileServer {
-
    /**
     * Construct a metadata server
     *
@@ -218,7 +217,7 @@ class MetadataFileServer {
                    });
                } else {
                    this.rootDb.batch(ops, SYNC_OPTIONS,
                        err => callback(err));
                }
            },
        };
@@ -235,17 +234,17 @@ class MetadataFileServer {
            put: (env, key, value, options, cb) => {
                const dbName = env.subLevel.join(SUBLEVEL_SEP);
                vrp.put({ db: dbName, key, value, options },
                    env.requestLogger, cb);
            },
            del: (env, key, options, cb) => {
                const dbName = env.subLevel.join(SUBLEVEL_SEP);
                vrp.del({ db: dbName, key, options },
                    env.requestLogger, cb);
            },
            get: (env, key, options, cb) => {
                const dbName = env.subLevel.join(SUBLEVEL_SEP);
                vrp.get({ db: dbName, key, options },
                    env.requestLogger, cb);
            },
            getDiskUsage: (env, cb) => diskusage.check(this.path, cb),
        });

View File

@@ -18,7 +18,6 @@ const DEFAULT_RECORD_LOG_NAME = 's3-recordlog';
 * object.
 */
class RecordLogProxy extends rpc.BaseClient {
-
    constructor(params) {
        super(params);
@@ -102,7 +101,6 @@ class ListRecordStream extends stream.Transform {
 * updates can be transactional with each other.
 */
class RecordLogService extends rpc.BaseService {
-
    /**
     * @constructor
     *
@@ -274,12 +272,12 @@ class RecordLogService extends rpc.BaseService {
                    limit: _params.limit,
                };
                const userStream = new ListRecordStream(endSeq,
                    _params.limit);
                const dbStream =
                    openLog.logDb.createReadStream(queryParams);
                dbStream.pipe(userStream);
                dbStream.once('error',
                    err => userStream.emit('error', err));
                userStream.once('error', err => {
                    userStream.removeAllListeners('info');
                    cb(err);
Some files were not shown because too many files have changed in this diff