Compare commits
No commits in common. "c858c813e2a4d075a78b254ba651add6cb745b23" and "32c895b21a31eb67dacc6e76d7f58b8142bf3ad1" have entirely different histories.
c858c813e2...32c895b21a

@@ -252,10 +252,6 @@
         "code": 404,
         "description": "The lifecycle configuration does not exist."
     },
-    "NoSuchObjectLockConfiguration": {
-        "code": 404,
-        "description": "The specified object does not have a ObjectLock configuration."
-    },
     "NoSuchWebsiteConfiguration": {
         "code": 404,
         "description": "The specified bucket does not have a website configuration"
@@ -272,10 +268,6 @@
         "code": 404,
         "description": "The replication configuration was not found"
     },
-    "ObjectLockConfigurationNotFoundError": {
-        "code": 404,
-        "description": "The object lock configuration was not found"
-    },
     "NotImplemented": {
         "code": 501,
         "description": "A header you provided implies functionality that is not implemented."

index.js — 14 changes

@@ -24,12 +24,6 @@ module.exports = {
         listTools: {
             DelimiterTools: require('./lib/algos/list/tools'),
         },
-        cache: {
-            LRUCache: require('./lib/algos/cache/LRUCache'),
-        },
-        stream: {
-            MergeStream: require('./lib/algos/stream/MergeStream'),
-        },
     },
     policies: {
         evaluators: require('./lib/policyEvaluator/evaluator.js'),
@@ -37,7 +31,6 @@ module.exports = {
             .validateUserPolicy,
         evaluatePrincipal: require('./lib/policyEvaluator/principal'),
         RequestContext: require('./lib/policyEvaluator/RequestContext.js'),
-        requestUtils: require('./lib/policyEvaluator/requestUtils'),
     },
     Clustering: require('./lib/Clustering'),
     testing: {
@@ -71,7 +64,6 @@ module.exports = {
        userMetadata: require('./lib/s3middleware/userMetadata'),
        convertToXml: require('./lib/s3middleware/convertToXml'),
        escapeForXml: require('./lib/s3middleware/escapeForXml'),
-       objectLegalHold: require('./lib/s3middleware/objectLegalHold'),
        tagging: require('./lib/s3middleware/tagging'),
        validateConditionalHeaders:
            require('./lib/s3middleware/validateConditionalHeaders')
@@ -87,7 +79,6 @@ module.exports = {
            SubStreamInterface:
                require('./lib/s3middleware/azureHelpers/SubStreamInterface'),
        },
-       retention: require('./lib/s3middleware/objectRetention'),
    },
    storage: {
        metadata: {
@@ -117,8 +108,6 @@ module.exports = {
            LifecycleConfiguration:
                require('./lib/models/LifecycleConfiguration'),
            BucketPolicy: require('./lib/models/BucketPolicy'),
-           ObjectLockConfiguration:
-               require('./lib/models/ObjectLockConfiguration'),
        },
        metrics: {
            StatsClient: require('./lib/metrics/StatsClient'),
@@ -128,7 +117,4 @@ module.exports = {
    pensieve: {
        credentialUtils: require('./lib/executables/pensieveCreds/utils'),
    },
-   stream: {
-       readJSONStreamObject: require('./lib/stream/readJSONStreamObject'),
-   },
};
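
Seen together, these index.js hunks remove the public export paths through which consumers reached the deleted modules. A sketch of what resolved before this change (the `arsenal` package name is an assumption):

    const { algos, s3middleware, policies } = require('arsenal');
    algos.cache.LRUCache;          // LRU cache class (file deleted below)
    algos.stream.MergeStream;      // sorted-stream merger (file deleted below)
    s3middleware.objectLegalHold;  // legal hold middleware
    s3middleware.retention;        // object retention middleware
    policies.requestUtils;         // getClientIp helper (file deleted below)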

@@ -1,167 +0,0 @@
-const assert = require('assert');
-
-/**
- * @class
- * @classdesc Implements a key-value in-memory cache with a capped
- * number of items and a Least Recently Used (LRU) strategy for
- * eviction.
- */
-class LRUCache {
-    /**
-     * @constructor
-     * @param {number} maxEntries - maximum number of entries kept in
-     * the cache
-     */
-    constructor(maxEntries) {
-        assert(maxEntries >= 1);
-        this._maxEntries = maxEntries;
-        this.clear();
-    }
-
-    /**
-     * Add or update the value associated to a key in the cache,
-     * making it the most recently accessed for eviction purpose.
-     *
-     * @param {string} key - key to add
-     * @param {object} value - associated value (can be of any type)
-     * @return {boolean} true if the cache contained an entry with
-     * this key, false if it did not
-     */
-    add(key, value) {
-        let entry = this._entryMap[key];
-        if (entry) {
-            entry.value = value;
-            // make the entry the most recently used by re-pushing it
-            // to the head of the LRU list
-            this._lruRemoveEntry(entry);
-            this._lruPushEntry(entry);
-            return true;
-        }
-        if (this._entryCount === this._maxEntries) {
-            // if the cache is already full, abide by the LRU strategy
-            // and remove the least recently used entry from the cache
-            // before pushing the new entry
-            this._removeEntry(this._lruTail);
-        }
-        entry = { key, value };
-        this._entryMap[key] = entry;
-        this._entryCount += 1;
-        this._lruPushEntry(entry);
-        return false;
-    }
-
-    /**
-     * Get the value associated to a key in the cache, making it the
-     * most recently accessed for eviction purpose.
-     *
-     * @param {string} key - key of which to fetch the associated value
-     * @return {object|undefined} - returns the associated value if
-     * exists in the cache, or undefined if not found - either if the
-     * key was never added or if it has been evicted from the cache.
-     */
-    get(key) {
-        const entry = this._entryMap[key];
-        if (entry) {
-            // make the entry the most recently used by re-pushing it
-            // to the head of the LRU list
-            this._lruRemoveEntry(entry);
-            this._lruPushEntry(entry);
-            return entry.value;
-        }
-        return undefined;
-    }
-
-    /**
-     * Remove an entry from the cache if exists
-     *
-     * @param {string} key - key to remove
-     * @return {boolean} true if an entry has been removed, false if
-     * there was no entry with this key in the cache - either if the
-     * key was never added or if it has been evicted from the cache.
-     */
-    remove(key) {
-        const entry = this._entryMap[key];
-        if (entry) {
-            this._removeEntry(entry);
-            return true;
-        }
-        return false;
-    }
-
-    /**
-     * Get the current number of cached entries
-     *
-     * @return {number} current number of cached entries
-     */
-    count() {
-        return this._entryCount;
-    }
-
-    /**
-     * Remove all entries from the cache
-     *
-     * @return {undefined}
-     */
-    clear() {
-        this._entryMap = {};
-        this._entryCount = 0;
-        this._lruHead = null;
-        this._lruTail = null;
-    }
-
-    /**
-     * Push an entry to the front of the LRU list, making it the most
-     * recently accessed
-     *
-     * @param {object} entry - entry to push
-     * @return {undefined}
-     */
-    _lruPushEntry(entry) {
-        /* eslint-disable no-param-reassign */
-        entry._lruNext = this._lruHead;
-        entry._lruPrev = null;
-        if (this._lruHead) {
-            this._lruHead._lruPrev = entry;
-        }
-        this._lruHead = entry;
-        if (!this._lruTail) {
-            this._lruTail = entry;
-        }
-        /* eslint-enable no-param-reassign */
-    }
-
-    /**
-     * Remove an entry from the LRU list
-     *
-     * @param {object} entry - entry to remove
-     * @return {undefined}
-     */
-    _lruRemoveEntry(entry) {
-        /* eslint-disable no-param-reassign */
-        if (entry._lruPrev) {
-            entry._lruPrev._lruNext = entry._lruNext;
-        } else {
-            this._lruHead = entry._lruNext;
-        }
-        if (entry._lruNext) {
-            entry._lruNext._lruPrev = entry._lruPrev;
-        } else {
-            this._lruTail = entry._lruPrev;
-        }
-        /* eslint-enable no-param-reassign */
-    }
-
-    /**
-     * Helper function to remove an existing entry from the cache
-     *
-     * @param {object} entry - cache entry to remove
-     * @return {undefined}
-     */
-    _removeEntry(entry) {
-        this._lruRemoveEntry(entry);
-        delete this._entryMap[entry.key];
-        this._entryCount -= 1;
-    }
-}
-
-module.exports = LRUCache;
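
The deleted file above is evidently the LRUCache module dropped from index.js (`./lib/algos/cache/LRUCache`). A minimal usage sketch of its API, assuming that require path:

    const LRUCache = require('./lib/algos/cache/LRUCache');

    const cache = new LRUCache(3); // keep at most 3 entries
    cache.add('a', 1);
    cache.add('b', 2);
    cache.add('c', 3);
    cache.get('a');    // 1 - also marks 'a' as most recently used
    cache.add('d', 4); // cache full: evicts 'b', the least recently used
    cache.get('b');    // undefined - evicted
    cache.count();     // 3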

@@ -1,10 +1,7 @@
 'use strict'; // eslint-disable-line strict
 
-const { inc, checkLimit, listingParamsMasterKeysV0ToV1,
-    FILTER_END, FILTER_ACCEPT } = require('./tools');
+const { inc, checkLimit, FILTER_END, FILTER_ACCEPT } = require('./tools');
 const DEFAULT_MAX_KEYS = 1000;
-const VSConst = require('../../versioning/constants').VersioningConstants;
-const { DbPrefixes, BucketVersioningKeyFormat } = VSConst;
 
 function numberDefault(num, defaultNum) {
     const parsedNum = Number.parseInt(num, 10);
@@ -20,12 +17,10 @@ class MultipartUploads {
      * Init and check parameters
      * @param {Object} params - The parameters you sent to DBD
      * @param {RequestLogger} logger - The logger of the request
-     * @param {String} [vFormat] - versioning key format
      * @return {undefined}
      */
-    constructor(params, logger, vFormat) {
+    constructor(params, logger) {
         this.params = params;
-        this.vFormat = vFormat || BucketVersioningKeyFormat.v0;
         this.CommonPrefixes = [];
         this.Uploads = [];
         this.IsTruncated = false;
@@ -38,20 +33,9 @@ class MultipartUploads {
         this.delimiter = params.delimiter;
         this.splitter = params.splitter;
         this.logger = logger;
-
-        Object.assign(this, {
-            [BucketVersioningKeyFormat.v0]: {
-                genMDParams: this.genMDParamsV0,
-                getObjectKey: this.getObjectKeyV0,
-            },
-            [BucketVersioningKeyFormat.v1]: {
-                genMDParams: this.genMDParamsV1,
-                getObjectKey: this.getObjectKeyV1,
-            },
-        }[this.vFormat]);
     }
 
-    genMDParamsV0() {
+    genMDParams() {
         const params = {};
         if (this.params.keyMarker) {
             params.gt = `overview${this.params.splitter}` +
@@ -73,11 +57,6 @@ class MultipartUploads {
         return params;
     }
 
-    genMDParamsV1() {
-        const v0params = this.genMDParamsV0();
-        return listingParamsMasterKeysV0ToV1(v0params);
-    }
-
     /**
      * This function adds the elements to the Uploads
      * Set the NextKeyMarker to the current key
@@ -122,14 +101,6 @@ class MultipartUploads {
         }
     }
 
-    getObjectKeyV0(obj) {
-        return obj.key;
-    }
-
-    getObjectKeyV1(obj) {
-        return obj.key.slice(DbPrefixes.Master.length);
-    }
-
     /**
      * This function applies filter on each element
     * @param {String} obj - The key and value of the element
@@ -142,7 +113,7 @@ class MultipartUploads {
             this.IsTruncated = this.maxKeys > 0;
             return FILTER_END;
         }
-        const key = this.getObjectKey(obj);
+        const key = obj.key;
         const value = obj.value;
         if (this.delimiter) {
             const mpuPrefixSlice = `overview${this.splitter}`.length;
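
The `Object.assign(this, {...}[this.vFormat])` block deleted from this constructor (and from the Delimiter classes below) binds the key-format-specific methods once, at construction time. A self-contained sketch of the idiom with hypothetical names ('\x7fM' as the master-key prefix is borrowed from Arsenal's versioning constants):

    class Lister {
        constructor(vFormat) {
            this.vFormat = vFormat || 'v0';
            // pick the method set matching the bucket's key format
            Object.assign(this, {
                v0: { genMDParams: Lister.prototype.genMDParamsV0 },
                v1: { genMDParams: Lister.prototype.genMDParamsV1 },
            }[this.vFormat]);
        }
        genMDParamsV0() { return { gte: 'key' }; }
        genMDParamsV1() { return { gte: '\x7fM' + 'key' }; }
    }
    console.log(new Lister('v1').genMDParams()); // { gte: '\x7fMkey' }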

@@ -28,14 +28,14 @@ class List extends Extension {
     }
 
     genMDParams() {
-        const params = this.parameters ? {
+        const params = {
             gt: this.parameters.gt,
             gte: this.parameters.gte || this.parameters.start,
             lt: this.parameters.lt,
             lte: this.parameters.lte || this.parameters.end,
             keys: this.parameters.keys,
             values: this.parameters.values,
-        } : {};
+        };
         Object.keys(params).forEach(key => {
             if (params[key] === null || params[key] === undefined) {
                 delete params[key];

@@ -1,10 +1,19 @@
 'use strict'; // eslint-disable-line strict
 
 const Extension = require('./Extension').default;
-const { inc, listingParamsMasterKeysV0ToV1,
-    FILTER_END, FILTER_ACCEPT, FILTER_SKIP } = require('./tools');
-const VSConst = require('../../versioning/constants').VersioningConstants;
-const { DbPrefixes, BucketVersioningKeyFormat } = VSConst;
+const { inc, FILTER_END, FILTER_ACCEPT, FILTER_SKIP } = require('./tools');
 
+/**
+ * Find the next delimiter in the path
+ *
+ * @param {string} key - path of the object
+ * @param {string} delimiter - string to find
+ * @param {number} index - index to start at
+ * @return {number} delimiterIndex - returns -1 in case no delimiter is found
+ */
+function nextDelimiter(key, delimiter, index) {
+    return key.indexOf(delimiter, index);
+}
+
 /**
  * Find the common prefix in the path
@@ -53,9 +62,8 @@ class Delimiter extends Extension {
      * or not
      * @param {RequestLogger} logger - The logger of the
      * request
-     * @param {String} [vFormat] - versioning key format
      */
-    constructor(parameters, logger, vFormat) {
+    constructor(parameters, logger) {
         super(parameters, logger);
         // original listing parameters
         this.delimiter = parameters.delimiter;
@@ -68,7 +76,6 @@ class Delimiter extends Extension {
             typeof parameters.alphabeticalOrder !== 'undefined' ?
             parameters.alphabeticalOrder : true;
 
-        this.vFormat = vFormat || BucketVersioningKeyFormat.v0;
         // results
         this.CommonPrefixes = [];
         this.Contents = [];
@@ -92,22 +99,9 @@ class Delimiter extends Extension {
                 this[this.nextContinueMarker].slice(0, nextDelimiterIndex +
                     this.delimiter.length);
         }
-
-        Object.assign(this, {
-            [BucketVersioningKeyFormat.v0]: {
-                genMDParams: this.genMDParamsV0,
-                getObjectKey: this.getObjectKeyV0,
-                skipping: this.skippingV0,
-            },
-            [BucketVersioningKeyFormat.v1]: {
-                genMDParams: this.genMDParamsV1,
-                getObjectKey: this.getObjectKeyV1,
-                skipping: this.skippingV1,
-            },
-        }[this.vFormat]);
     }
 
-    genMDParamsV0() {
+    genMDParams() {
         const params = {};
         if (this.prefix) {
             params.gte = this.prefix;
@@ -124,11 +118,6 @@ class Delimiter extends Extension {
         return params;
     }
 
-    genMDParamsV1() {
-        const params = this.genMDParamsV0();
-        return listingParamsMasterKeysV0ToV1(params);
-    }
-
     /**
      * check if the max keys count has been reached and set the
     * final state of the result if it is the case
@@ -161,14 +150,6 @@ class Delimiter extends Extension {
         return FILTER_ACCEPT;
     }
 
-    getObjectKeyV0(obj) {
-        return obj.key;
-    }
-
-    getObjectKeyV1(obj) {
-        return obj.key.slice(DbPrefixes.Master.length);
-    }
-
     /**
      * Filter to apply on each iteration, based on:
      * - prefix
@@ -181,7 +162,7 @@ class Delimiter extends Extension {
      * @return {number} - indicates if iteration should continue
      */
     filter(obj) {
-        const key = this.getObjectKey(obj);
+        const key = obj.key;
         const value = obj.value;
         if ((this.prefix && !key.startsWith(this.prefix))
             || (this.alphabeticalOrder
@@ -191,7 +172,9 @@ class Delimiter extends Extension {
         }
         if (this.delimiter) {
             const baseIndex = this.prefix ? this.prefix.length : 0;
-            const delimiterIndex = key.indexOf(this.delimiter, baseIndex);
+            const delimiterIndex = nextDelimiter(key,
+                this.delimiter,
+                baseIndex);
             if (delimiterIndex === -1) {
                 return this.addContents(key, value);
             }
@@ -222,27 +205,15 @@ class Delimiter extends Extension {
     }
 
     /**
-     * If repd happens to want to skip listing on a bucket in v0
-     * versioning key format, here is an idea.
+     * If repd happens to want to skip listing, here is an idea.
      *
      * @return {string} - the present range (NextMarker) if repd believes
      * that it's enough and should move on
      */
-    skippingV0() {
+    skipping() {
         return this[this.nextContinueMarker];
     }
 
-    /**
-     * If repd happens to want to skip listing on a bucket in v1
-     * versioning key format, here is an idea.
-     *
-     * @return {string} - the present range (NextMarker) if repd believes
-     * that it's enough and should move on
-     */
-    skippingV1() {
-        return DbPrefixes.Master + this[this.nextContinueMarker];
-    }
-
     /**
      * Return an object containing all mandatory fields to use once the
     * iteration is done, doesn't show a NextMarker field if the output
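
A sketch of how this filter splits listed keys between Contents and CommonPrefixes; the stub logger and the require path are assumptions:

    const { Delimiter } = require('./lib/algos/list/delimiter');
    const logger = { info() {}, debug() {}, error() {} }; // stub

    const listing = new Delimiter({ prefix: 'photos/', delimiter: '/' }, logger);
    listing.filter({ key: 'photos/2020/a.jpg', value: '{}' });
    // delimiter found after the prefix -> records common prefix 'photos/2020/'
    listing.filter({ key: 'photos/b.jpg', value: '{}' });
    // no delimiter after the prefix -> pushed onto Contents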

@@ -3,11 +3,9 @@
 const Delimiter = require('./delimiter').Delimiter;
 const Version = require('../../versioning/Version').Version;
 const VSConst = require('../../versioning/constants').VersioningConstants;
-const { BucketVersioningKeyFormat } = VSConst;
 const { FILTER_ACCEPT, FILTER_SKIP, SKIP_NONE } = require('./tools');
 
 const VID_SEP = VSConst.VersionId.Separator;
-const { DbPrefixes } = VSConst;
 
 /**
  * Handle object listing with parameters. This extends the base class Delimiter
@@ -25,29 +23,16 @@ class DelimiterMaster extends Delimiter {
      * @param {String} parameters.startAfter - marker per amazon v2 format
      * @param {String} parameters.continuationToken - obfuscated amazon token
      * @param {RequestLogger} logger - The logger of the request
-     * @param {String} [vFormat] - versioning key format
     */
-    constructor(parameters, logger, vFormat) {
-        super(parameters, logger, vFormat);
+    constructor(parameters, logger) {
+        super(parameters, logger);
         // non-PHD master version or a version whose master is a PHD version
         this.prvKey = undefined;
         this.prvPHDKey = undefined;
-
-        Object.assign(this, {
-            [BucketVersioningKeyFormat.v0]: {
-                filter: this.filterV0,
-                skipping: this.skippingV0,
-            },
-            [BucketVersioningKeyFormat.v1]: {
-                filter: this.filterV1,
-                skipping: this.skippingV1,
-            },
-        }[this.vFormat]);
     }
 
     /**
-     * Filter to apply on each iteration for buckets in v0 format,
-     * based on:
+     * Filter to apply on each iteration, based on:
      * - prefix
      * - delimiter
     * - maxKeys
@@ -57,7 +42,7 @@ class DelimiterMaster extends Delimiter {
      * @param {String} obj.value - The value of the element
      * @return {number} - indicates if iteration should continue
      */
-    filterV0(obj) {
+    filter(obj) {
         let key = obj.key;
         const value = obj.value;
 
@@ -136,26 +121,7 @@ class DelimiterMaster extends Delimiter {
         return this.addContents(key, value);
     }
 
-    /**
-     * Filter to apply on each iteration for buckets in v1 format,
-     * based on:
-     * - prefix
-     * - delimiter
-     * - maxKeys
-     * The marker is being handled directly by levelDB
-     * @param {Object} obj - The key and value of the element
-     * @param {String} obj.key - The key of the element
-     * @param {String} obj.value - The value of the element
-     * @return {number} - indicates if iteration should continue
-     */
-    filterV1(obj) {
-        // Filtering master keys in v1 is simply listing the master
-        // keys, as the state of version keys do not change the
-        // result, so we can use Delimiter method directly.
-        return super.filter(obj);
-    }
-
-    skippingV0() {
+    skipping() {
         if (this[this.nextContinueMarker]) {
             // next marker or next continuation token:
             // - foo/ : skipping foo/
@@ -169,14 +135,6 @@ class DelimiterMaster extends Delimiter {
         }
         return SKIP_NONE;
     }
-
-    skippingV1() {
-        const skipTo = this.skippingV0();
-        if (skipTo === SKIP_NONE) {
-            return SKIP_NONE;
-        }
-        return DbPrefixes.Master + skipTo;
-    }
 }
 
 module.exports = { DelimiterMaster };

@@ -7,7 +7,10 @@ const { inc, FILTER_END, FILTER_ACCEPT, FILTER_SKIP, SKIP_NONE } =
     require('./tools');
 
 const VID_SEP = VSConst.VersionId.Separator;
-const { DbPrefixes, BucketVersioningKeyFormat } = VSConst;
 
+function formatVersionKey(key, versionId) {
+    return `${key}${VID_SEP}${versionId}`;
+}
+
 /**
  * Handle object listing with parameters
@@ -22,8 +25,8 @@ const { DbPrefixes, BucketVersioningKeyFormat } = VSConst;
  * @prop {Number} maxKeys - number of keys to list
  */
 class DelimiterVersions extends Delimiter {
-    constructor(parameters, logger, vFormat) {
-        super(parameters, logger, vFormat);
+    constructor(parameters, logger) {
+        super(parameters, logger);
         // specific to version listing
         this.keyMarker = parameters.keyMarker;
         this.versionIdMarker = parameters.versionIdMarker;
@@ -33,22 +36,9 @@ class DelimiterVersions extends Delimiter {
         // listing results
         this.NextMarker = parameters.keyMarker;
         this.NextVersionIdMarker = undefined;
-
-        Object.assign(this, {
-            [BucketVersioningKeyFormat.v0]: {
-                genMDParams: this.genMDParamsV0,
-                filter: this.filterV0,
-                skipping: this.skippingV0,
-            },
-            [BucketVersioningKeyFormat.v1]: {
-                genMDParams: this.genMDParamsV1,
-                filter: this.filterV1,
-                skipping: this.skippingV1,
-            },
-        }[this.vFormat]);
     }
 
-    genMDParamsV0() {
+    genMDParams() {
         const params = {};
         if (this.parameters.prefix) {
             params.gte = this.parameters.prefix;
@@ -62,9 +52,8 @@ class DelimiterVersions extends Delimiter {
         if (this.parameters.versionIdMarker) {
             // versionIdMarker should always come with keyMarker
             // but may not be the other way around
-            params.gt = this.parameters.keyMarker
-                + VID_SEP
-                + this.parameters.versionIdMarker;
+            params.gt = formatVersionKey(this.parameters.keyMarker,
+                this.parameters.versionIdMarker);
         } else {
             params.gt = inc(this.parameters.keyMarker + VID_SEP);
         }
@@ -72,59 +61,6 @@ class DelimiterVersions extends Delimiter {
         return params;
     }
 
-    genMDParamsV1() {
-        // return an array of two listing params sets to ask for
-        // synchronized listing of M and V ranges
-        const params = [{}, {}];
-        if (this.parameters.prefix) {
-            params[0].gte = DbPrefixes.Master + this.parameters.prefix;
-            params[0].lt = DbPrefixes.Master + inc(this.parameters.prefix);
-            params[1].gte = DbPrefixes.Version + this.parameters.prefix;
-            params[1].lt = DbPrefixes.Version + inc(this.parameters.prefix);
-        } else {
-            params[0].gte = DbPrefixes.Master;
-            params[0].lt = inc(DbPrefixes.Master); // stop after the last master key
-            params[1].gte = DbPrefixes.Version;
-            params[1].lt = inc(DbPrefixes.Version); // stop after the last version key
-        }
-        if (this.parameters.keyMarker) {
-            if (params[1].gte <= DbPrefixes.Version + this.parameters.keyMarker) {
-                delete params[0].gte;
-                delete params[1].gte;
-                params[0].gt = DbPrefixes.Master + inc(this.parameters.keyMarker + VID_SEP);
-                if (this.parameters.versionIdMarker) {
-                    // versionIdMarker should always come with keyMarker
-                    // but may not be the other way around
-                    params[1].gt = DbPrefixes.Version
-                        + this.parameters.keyMarker
-                        + VID_SEP
-                        + this.parameters.versionIdMarker;
-                } else {
-                    params[1].gt = DbPrefixes.Version
-                        + inc(this.parameters.keyMarker + VID_SEP);
-                }
-            }
-        }
-        return params;
-    }
-
-    /**
-     * Used to synchronize listing of M and V prefixes by object key
-     *
-     * @param {object} masterObj object listed from first range
-     * returned by genMDParamsV1() (the master keys range)
-     * @param {object} versionObj object listed from second range
-     * returned by genMDParamsV1() (the version keys range)
-     * @return {number} comparison result:
-     *   * -1 if master key < version key
-     *   * 1 if master key > version key
-     */
-    compareObjects(masterObj, versionObj) {
-        const masterKey = masterObj.key.slice(DbPrefixes.Master.length);
-        const versionKey = versionObj.key.slice(DbPrefixes.Version.length);
-        return masterKey < versionKey ? -1 : 1;
-    }
-
     /**
      * Add a (key, versionId, value) tuple to the listing.
     * Set the NextMarker to the current key
@@ -151,8 +87,7 @@ class DelimiterVersions extends Delimiter {
     }
 
     /**
-     * Filter to apply on each iteration if bucket is in v0
-     * versioning key format, based on:
+     * Filter to apply on each iteration, based on:
      * - prefix
     * - delimiter
     * - maxKeys
@@ -162,50 +97,27 @@ class DelimiterVersions extends Delimiter {
      * @param {String} obj.value - The value of the element
      * @return {number} - indicates if iteration should continue
      */
-    filterV0(obj) {
+    filter(obj) {
         if (Version.isPHD(obj.value)) {
             return FILTER_ACCEPT; // trick repd to not increase its streak
         }
-        return this.filterCommon(obj.key, obj.value);
-    }
-
-    /**
-     * Filter to apply on each iteration if bucket is in v1
-     * versioning key format, based on:
-     * - prefix
-     * - delimiter
-     * - maxKeys
-     * The marker is being handled directly by levelDB
-     * @param {Object} obj - The key and value of the element
-     * @param {String} obj.key - The key of the element
-     * @param {String} obj.value - The value of the element
-     * @return {number} - indicates if iteration should continue
-     */
-    filterV1(obj) {
-        // this function receives both M and V keys, but their prefix
-        // length is the same so we can remove their prefix without
-        // looking at the type of key
-        return this.filterCommon(obj.key.slice(DbPrefixes.Master.length),
-            obj.value);
-    }
-
-    filterCommon(key, value) {
-        if (this.prefix && !key.startsWith(this.prefix)) {
+        if (this.prefix && !obj.key.startsWith(this.prefix)) {
             return FILTER_SKIP;
         }
-        let nonversionedKey;
-        let versionId = undefined;
-        const versionIdIndex = key.indexOf(VID_SEP);
+        let key = obj.key; // original key
+        let versionId = undefined; // versionId
+        const versionIdIndex = obj.key.indexOf(VID_SEP);
         if (versionIdIndex < 0) {
-            nonversionedKey = key;
-            this.masterKey = key;
+            this.masterKey = obj.key;
             this.masterVersionId =
-                Version.from(value).getVersionId() || 'null';
+                Version.from(obj.value).getVersionId() || 'null';
             versionId = this.masterVersionId;
         } else {
-            nonversionedKey = key.slice(0, versionIdIndex);
-            versionId = key.slice(versionIdIndex + 1);
-            if (this.masterKey === nonversionedKey && this.masterVersionId === versionId) {
+            // eslint-disable-next-line
+            key = obj.key.slice(0, versionIdIndex);
+            // eslint-disable-next-line
+            versionId = obj.key.slice(versionIdIndex + 1);
+            if (this.masterKey === key && this.masterVersionId === versionId) {
                 return FILTER_ACCEPT; // trick repd to not increase its streak
             }
             this.masterKey = undefined;
@@ -213,15 +125,15 @@ class DelimiterVersions extends Delimiter {
         }
         if (this.delimiter) {
             const baseIndex = this.prefix ? this.prefix.length : 0;
-            const delimiterIndex = nonversionedKey.indexOf(this.delimiter, baseIndex);
+            const delimiterIndex = key.indexOf(this.delimiter, baseIndex);
             if (delimiterIndex >= 0) {
-                return this.addCommonPrefix(nonversionedKey, delimiterIndex);
+                return this.addCommonPrefix(key, delimiterIndex);
             }
         }
-        return this.addContents({ key: nonversionedKey, value, versionId });
+        return this.addContents({ key, value: obj.value, versionId });
     }
 
-    skippingV0() {
+    skipping() {
         if (this.NextMarker) {
             const index = this.NextMarker.lastIndexOf(this.delimiter);
             if (index === this.NextMarker.length - 1) {
@@ -231,16 +143,6 @@ class DelimiterVersions extends Delimiter {
         return SKIP_NONE;
     }
 
-    skippingV1() {
-        const skipV0 = this.skippingV0();
-        if (skipV0 === SKIP_NONE) {
-            return SKIP_NONE;
-        }
-        // skip to the same object key in both M and V range listings
-        return [DbPrefixes.Master + skipV0,
-            DbPrefixes.Version + skipV0];
-    }
-
     /**
      * Return an object containing all mandatory fields to use once the
     * iteration is done, doesn't show a NextMarker field if the output
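
For reference, the deleted genMDParamsV1 produced two synchronized range-parameter sets, one over master keys and one over version keys. Assuming Arsenal's usual prefix values (DbPrefixes.Master === '\x7fM', DbPrefixes.Version === '\x7fV'), a prefix-only listing would yield:

    // sketch of genMDParamsV1's output for { prefix: 'photos/' }:
    [
        { gte: '\x7fMphotos/', lt: '\x7fMphotos0' }, // master keys (M) range
        { gte: '\x7fVphotos/', lt: '\x7fVphotos0' }, // version keys (V) range
    ];
    // '0' follows '/' because inc() bumps the last character's code point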

@@ -1,5 +1,3 @@
-const { DbPrefixes } = require('../../versioning/constants').VersioningConstants;
-
 // constants for extensions
 const SKIP_NONE = undefined; // to be inline with the values of NextMarker
 const FILTER_ACCEPT = 1;
@@ -33,36 +31,9 @@ function inc(str) {
         String.fromCharCode(str.charCodeAt(str.length - 1) + 1)) : str;
 }
 
-/**
- * Transform listing parameters for v0 versioning key format to make
- * it compatible with v1 format
- *
- * @param {object} v0params - listing parameters for v0 format
- * @return {object} - listing parameters for v1 format
- */
-function listingParamsMasterKeysV0ToV1(v0params) {
-    const v1params = Object.assign({}, v0params);
-    if (v0params.gt !== undefined) {
-        v1params.gt = `${DbPrefixes.Master}${v0params.gt}`;
-    } else if (v0params.gte !== undefined) {
-        v1params.gte = `${DbPrefixes.Master}${v0params.gte}`;
-    } else {
-        v1params.gte = DbPrefixes.Master;
-    }
-    if (v0params.lt !== undefined) {
-        v1params.lt = `${DbPrefixes.Master}${v0params.lt}`;
-    } else if (v0params.lte !== undefined) {
-        v1params.lte = `${DbPrefixes.Master}${v0params.lte}`;
-    } else {
-        v1params.lt = inc(DbPrefixes.Master); // stop after the last master key
-    }
-    return v1params;
-}
-
 module.exports = {
     checkLimit,
     inc,
-    listingParamsMasterKeysV0ToV1,
     SKIP_NONE,
     FILTER_END,
     FILTER_SKIP,
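
The removed helper rewrote v0 listing bounds into the v1 master-key namespace. A behavior sketch, again assuming DbPrefixes.Master === '\x7fM':

    listingParamsMasterKeysV0ToV1({ gte: 'a/', lt: 'a0' });
    // => { gte: '\x7fMa/', lt: '\x7fMa0' }
    listingParamsMasterKeysV0ToV1({});
    // => { gte: '\x7fM', lt: '\x7fN' } - brackets the whole master range,
    //    since inc('\x7fM') === '\x7fN'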

@@ -1,106 +0,0 @@
-const stream = require('stream');
-
-class MergeStream extends stream.Readable {
-    constructor(stream1, stream2, compare) {
-        super({ objectMode: true });
-
-        this._compare = compare;
-        this._streams = [stream1, stream2];
-
-        // peekItems elements represent the latest item consumed from
-        // the respective input stream but not yet pushed. It can also
-        // be one of the following special values:
-        // - undefined: stream hasn't started emitting items
-        // - null: EOF reached and no more item to peek
-        this._peekItems = [undefined, undefined];
-        this._streamEof = [false, false];
-        this._streamToResume = null;
-
-        stream1.on('data', item => this._onItem(stream1, item, 0, 1));
-        stream1.once('end', () => this._onEnd(stream1, 0, 1));
-        stream1.once('error', err => this._onError(stream1, err, 0, 1));
-
-        stream2.on('data', item => this._onItem(stream2, item, 1, 0));
-        stream2.once('end', () => this._onEnd(stream2, 1, 0));
-        stream2.once('error', err => this._onError(stream2, err, 1, 0));
-    }
-
-    _read() {
-        if (this._streamToResume) {
-            this._streamToResume.resume();
-            this._streamToResume = null;
-        }
-    }
-
-    _destroy(err, callback) {
-        for (let i = 0; i < 2; ++i) {
-            if (!this._streamEof[i]) {
-                this._streams[i].destroy();
-            }
-        }
-        callback();
-    }
-
-    _onItem(myStream, myItem, myIndex, otherIndex) {
-        this._peekItems[myIndex] = myItem;
-        const otherItem = this._peekItems[otherIndex];
-        if (otherItem === undefined) {
-            // wait for the other stream to wake up
-            return myStream.pause();
-        }
-        if (otherItem === null || this._compare(myItem, otherItem) <= 0) {
-            if (!this.push(myItem)) {
-                myStream.pause();
-                this._streamToResume = myStream;
-            }
-            return undefined;
-        }
-        const otherStream = this._streams[otherIndex];
-        const otherMore = this.push(otherItem);
-        if (this._streamEof[otherIndex]) {
-            this._peekItems[otherIndex] = null;
-            return this.push(myItem);
-        }
-        myStream.pause();
-        if (otherMore) {
-            return otherStream.resume();
-        }
-        this._streamToResume = otherStream;
-        return undefined;
-    }
-
-    _onEnd(myStream, myIndex, otherIndex) {
-        this._streamEof[myIndex] = true;
-        if (this._peekItems[myIndex] === undefined) {
-            this._peekItems[myIndex] = null;
-        }
-        const myItem = this._peekItems[myIndex];
-        const otherItem = this._peekItems[otherIndex];
-        if (otherItem === undefined) {
-            // wait for the other stream to wake up
-            return undefined;
-        }
-        if (otherItem === null) {
-            return this.push(null);
-        }
-        if (myItem === null || this._compare(myItem, otherItem) <= 0) {
-            this.push(otherItem);
-            this._peekItems[myIndex] = null;
-        }
-        if (this._streamEof[otherIndex]) {
-            return this.push(null);
-        }
-        const otherStream = this._streams[otherIndex];
-        return otherStream.resume();
-    }
-
-    _onError(myStream, err, myIndex, otherIndex) {
-        myStream.destroy();
-        if (this._streams[otherIndex]) {
-            this._streams[otherIndex].destroy();
-        }
-        this.emit('error', err);
-    }
-}
-
-module.exports = MergeStream;
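
A usage sketch for the deleted MergeStream: it merges two individually sorted object-mode streams into one sorted output using the supplied comparator (Readable.from is available since Node.js 12; the require path is an assumption based on the removed index.js export):

    const { Readable } = require('stream');
    const MergeStream = require('./lib/algos/stream/MergeStream');

    const s1 = Readable.from([{ key: 'a' }, { key: 'c' }]);
    const s2 = Readable.from([{ key: 'b' }, { key: 'd' }]);
    const merged = new MergeStream(s1, s2,
        (i1, i2) => (i1.key < i2.key ? -1 : 1));
    merged.on('data', item => console.log(item.key)); // a, b, c, d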

@@ -24,12 +24,6 @@ function vaultSignatureCb(err, authInfo, log, callback, streamingV4Params) {
     const info = authInfo.message.body;
     const userInfo = new AuthInfo(info.userInfo);
     const authorizationResults = info.authorizationResults;
-    const auditLog = { accountDisplayName: userInfo.getAccountDisplayName() };
-    const iamDisplayName = userInfo.getIAMdisplayName();
-    if (iamDisplayName) {
-        auditLog.IAMdisplayName = iamDisplayName;
-    }
-    log.addDefaultFields(auditLog);
     return callback(null, userInfo, authorizationResults, streamingV4Params);
 }
 

@@ -34,13 +34,8 @@ function check(request, log, data) {
     }
 
     const currentTime = Date.now();
-
-    const preSignedURLExpiry = process.env.PRE_SIGN_URL_EXPIRY
-        && !Number.isNaN(process.env.PRE_SIGN_URL_EXPIRY)
-        ? Number.parseInt(process.env.PRE_SIGN_URL_EXPIRY, 10)
-        : constants.defaultPreSignedURLExpiry * 1000;
-
-    if (expirationTime > currentTime + preSignedURLExpiry) {
+    // 604800000 ms (seven days).
+    if (expirationTime > currentTime + 604800000) {
         log.debug('expires parameter too far in future',
             { expires: request.query.Expires });
         return { err: errors.AccessDenied };
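
The restored literal and the removed constant encode the same bound; the arithmetic:

    // seven days in milliseconds:
    // 7 * 24 * 60 * 60 = 604800 s    (constants.defaultPreSignedURLExpiry)
    // 604800 * 1000    = 604800000 ms (the hard-coded limit above)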

@@ -72,22 +72,4 @@ module.exports = {
     permittedCapitalizedBuckets: {
         METADATA: true,
     },
-    // HTTP server keep-alive timeout is set to a higher value than
-    // client's free sockets timeout to avoid the risk of triggering
-    // ECONNRESET errors if the server closes the connection at the
-    // exact moment clients attempt to reuse an established connection
-    // for a new request.
-    //
-    // Note: the ability to close inactive connections on the client
-    // after httpClientFreeSocketsTimeout milliseconds requires the
-    // use of "agentkeepalive" module instead of the regular node.js
-    // http.Agent.
-    httpServerKeepAliveTimeout: 60000,
-    httpClientFreeSocketTimeout: 55000,
-    // Default expiration value of the S3 pre-signed URL duration
-    // 604800 seconds (seven days).
-    defaultPreSignedURLExpiry: 7 * 24 * 60 * 60,
-    // Regex for ISO-8601 formatted date
-    shortIso8601Regex: /\d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2}Z/,
-    longIso8601Regex: /\d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2}.\d{3}Z/,
 };

@@ -2,11 +2,10 @@ const assert = require('assert');
 const { WebsiteConfiguration } = require('./WebsiteConfiguration');
 const ReplicationConfiguration = require('./ReplicationConfiguration');
 const LifecycleConfiguration = require('./LifecycleConfiguration');
-const ObjectLockConfiguration = require('./ObjectLockConfiguration');
 const BucketPolicy = require('./BucketPolicy');
 
 // WHEN UPDATING THIS NUMBER, UPDATE MODELVERSION.MD CHANGELOG
-const modelVersion = 7;
+const modelVersion = 6;
 
 class BucketInfo {
     /**
@@ -50,15 +49,13 @@ class BucketInfo {
      * @param {object} [replicationConfiguration] - replication configuration
      * @param {object} [lifecycleConfiguration] - lifecycle configuration
      * @param {object} [bucketPolicy] - bucket policy
-     * @param {boolean} [objectLockEnabled] - true when object lock enabled
-     * @param {object} [objectLockConfiguration] - object lock configuration
     */
     constructor(name, owner, ownerDisplayName, creationDate,
         mdBucketModelVersion, acl, transient, deleted,
         serverSideEncryption, versioningConfiguration,
         locationConstraint, websiteConfiguration, cors,
         replicationConfiguration, lifecycleConfiguration,
-        bucketPolicy, objectLockEnabled, objectLockConfiguration) {
+        bucketPolicy) {
         assert.strictEqual(typeof name, 'string');
         assert.strictEqual(typeof owner, 'string');
         assert.strictEqual(typeof ownerDisplayName, 'string');
@@ -121,9 +118,6 @@ class BucketInfo {
         if (bucketPolicy) {
             BucketPolicy.validatePolicy(bucketPolicy);
         }
-        if (objectLockConfiguration) {
-            ObjectLockConfiguration.validateConfig(objectLockConfiguration);
-        }
         const aclInstance = acl || {
             Canned: 'private',
             FULL_CONTROL: [],
@@ -150,8 +144,6 @@ class BucketInfo {
         this._cors = cors || null;
         this._lifecycleConfiguration = lifecycleConfiguration || null;
         this._bucketPolicy = bucketPolicy || null;
-        this._objectLockEnabled = objectLockEnabled || false;
-        this._objectLockConfiguration = objectLockConfiguration || null;
         return this;
     }
     /**
@@ -176,8 +168,6 @@ class BucketInfo {
             replicationConfiguration: this._replicationConfiguration,
             lifecycleConfiguration: this._lifecycleConfiguration,
             bucketPolicy: this._bucketPolicy,
-            objectLockEnabled: this._objectLockEnabled,
-            objectLockConfiguration: this._objectLockConfiguration,
         };
         if (this._websiteConfiguration) {
             bucketInfos.websiteConfiguration =
@@ -199,8 +189,7 @@ class BucketInfo {
             obj.transient, obj.deleted, obj.serverSideEncryption,
             obj.versioningConfiguration, obj.locationConstraint, websiteConfig,
             obj.cors, obj.replicationConfiguration, obj.lifecycleConfiguration,
-            obj.bucketPolicy, obj.objectLockEnabled,
-            obj.objectLockConfiguration);
+            obj.bucketPolicy);
     }
 
     /**
@@ -224,8 +213,7 @@ class BucketInfo {
             data._versioningConfiguration, data._locationConstraint,
             data._websiteConfiguration, data._cors,
             data._replicationConfiguration, data._lifecycleConfiguration,
-            data._bucketPolicy, data._objectLockEnabled,
-            data._objectLockConfiguration);
+            data._bucketPolicy);
     }
 
     /**
@@ -370,23 +358,6 @@ class BucketInfo {
         this._bucketPolicy = bucketPolicy;
         return this;
     }
-    /**
-     * Get object lock configuration
-     * @return {object|null} object lock configuration information or `null` if
-     * the bucket does not have an object lock configuration
-     */
-    getObjectLockConfiguration() {
-        return this._objectLockConfiguration;
-    }
-    /**
-     * Set object lock configuration
-     * @param {object} objectLockConfiguration - object lock information
-     * @return {BucketInfo} - bucket info instance
-     */
-    setObjectLockConfiguration(objectLockConfiguration) {
-        this._objectLockConfiguration = objectLockConfiguration;
-        return this;
-    }
     /**
      * Get cors resource
     * @return {object[]} cors
@@ -577,22 +548,6 @@ class BucketInfo {
         return this._versioningConfiguration &&
             this._versioningConfiguration.Status === 'Enabled';
     }
-    /**
-     * Check if object lock is enabled.
-     * @return {boolean} - depending on whether object lock is enabled
-     */
-    isObjectLockEnabled() {
-        return !!this._objectLockEnabled;
-    }
-    /**
-     * Set the value of objectLockEnabled field.
-     * @param {boolean} enabled - true if object lock enabled else false.
-     * @return {BucketInfo} - bucket info instance
-     */
-    setObjectLockEnabled(enabled) {
-        this._objectLockEnabled = enabled;
-        return this;
-    }
 }
 
 module.exports = BucketInfo;

@@ -36,19 +36,6 @@ const { validateResourcePolicy } = require('../policy/policyValidator');
  * }
  */
 
-const objectActions = [
-    's3:AbortMultipartUpload',
-    's3:DeleteObject',
-    's3:DeleteObjectTagging',
-    's3:GetObject',
-    's3:GetObjectAcl',
-    's3:GetObjectTagging',
-    's3:ListMultipartUploadParts',
-    's3:PutObject',
-    's3:PutObjectAcl',
-    's3:PutObjectTagging',
-];
-
 class BucketPolicy {
     /**
      * Create a Bucket Policy instance
@@ -109,8 +96,7 @@ class BucketPolicy {
                 [s.Action] : s.Action;
             const resources = typeof s.Resource === 'string' ?
                 [s.Resource] : s.Resource;
-            const objectAction = actions.some(a =>
-                a.includes('Object') || objectActions.includes(a));
+            const objectAction = actions.some(a => a.includes('Object'));
             // wildcardObjectAction checks for actions such as 's3:*' or
             // 's3:Put*' but will return false for actions such as
             // 's3:PutBucket*'
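
The deleted objectActions list matters because `a.includes('Object')` alone misses object-scoped actions whose names lack the substring. A quick check:

    const actions = ['s3:AbortMultipartUpload', 's3:ListMultipartUploadParts'];
    console.log(actions.some(a => a.includes('Object')));
    // false - these object-level actions were only caught via the explicit list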

@@ -1,238 +0,0 @@
-const assert = require('assert');
-
-const errors = require('../errors');
-
-/**
- * Format of xml request:
- *
- * <ObjectLockConfiguration>
- *      <ObjectLockEnabled>Enabled</ObjectLockEnabled>
- *      <Rule>
- *          <DefaultRetention>
- *              <Mode>GOVERNANCE|COMPLIANCE</Mode>
- *              <Days>1</Days>
- *              <Years>1</Years>
- *          </DefaultRetention>
- *      </Rule>
- * </ObjectLockConfiguration>
- */
-
-/**
- * Format of config:
- *
- * config = {
- *     rule: {
- *         mode: GOVERNANCE|COMPLIANCE,
- *         days|years: integer,
- *     }
- * }
- */
-class ObjectLockConfiguration {
-    /**
-     * Create an Object Lock Configuration instance
-     * @param {string} xml - the parsed configuration xml
-     * @return {object} - ObjectLockConfiguration instance
-     */
-    constructor(xml) {
-        this._parsedXml = xml;
-        this._config = {};
-    }
-
-    /**
-     * Get the object lock configuration
-     * @return {object} - contains error if parsing failed
-     */
-    getValidatedObjectLockConfiguration() {
-        const validConfig = this._parseObjectLockConfig();
-        if (validConfig.error) {
-            this._config.error = validConfig.error;
-        }
-        return this._config;
-    }
-
-    /**
-     * Check that mode is valid
-     * @param {array} mode - array containing mode value
-     * @return {object} - contains error if parsing failed
-     */
-    _parseMode(mode) {
-        const validMode = {};
-        const expectedModes = ['GOVERNANCE', 'COMPLIANCE'];
-        if (!mode || !mode[0]) {
-            validMode.error = errors.MalformedXML.customizeDescription(
-                'request xml does not contain Mode');
-            return validMode;
-        }
-        if (mode.length > 1) {
-            validMode.error = errors.MalformedXML.customizeDescription(
-                'request xml contains more than one Mode');
-            return validMode;
-        }
-        if (!expectedModes.includes(mode[0])) {
-            validMode.error = errors.MalformedXML.customizeDescription(
-                'Mode request xml must be one of "GOVERNANCE", "COMPLIANCE"');
-            return validMode;
-        }
-        validMode.mode = mode[0];
-        return validMode;
-    }
-
-    /**
-     * Check that time limit is valid
-     * @param {object} dr - DefaultRetention object containing days or years
-     * @return {object} - contains error if parsing failed
-     */
-    _parseTime(dr) {
-        const validTime = {};
-        if (dr.Days && dr.Years) {
-            validTime.error = errors.MalformedXML.customizeDescription(
-                'request xml contains both Days and Years');
-            return validTime;
-        }
-        const timeType = dr.Days ? 'Days' : 'Years';
-        if (!dr[timeType] || !dr[timeType][0]) {
-            validTime.error = errors.MalformedXML.customizeDescription(
-                'request xml does not contain Days or Years');
-            return validTime;
-        }
-        if (dr[timeType].length > 1) {
-            validTime.error = errors.MalformedXML.customizeDescription(
-                'request xml contains more than one retention period');
-            return validTime;
-        }
-        const timeValue = Number.parseInt(dr[timeType][0], 10);
-        if (Number.isNaN(timeValue)) {
-            validTime.error = errors.MalformedXML.customizeDescription(
-                'request xml does not contain valid retention period');
-            return validTime;
-        }
-        if (timeValue < 1) {
-            validTime.error = errors.InvalidArgument.customizeDescription(
-                'retention period must be a positive integer');
-            return validTime;
-        }
-        if ((timeType === 'Days' && timeValue > 36500) ||
-            (timeType === 'Years' && timeValue > 100)) {
-            validTime.error = errors.InvalidArgument.customizeDescription(
-                'retention period is too large');
-            return validTime;
-        }
-        validTime.timeType = timeType.toLowerCase();
-        validTime.timeValue = timeValue;
-        return validTime;
-    }
-
-    /**
-     * Check that object lock configuration is valid
-     * @return {object} - contains error if parsing failed
-     */
-    _parseObjectLockConfig() {
-        const validConfig = {};
-        if (!this._parsedXml || this._parsedXml === '') {
-            validConfig.error = errors.MalformedXML.customizeDescription(
-                'request xml is undefined or empty');
-            return validConfig;
-        }
-        const objectLockConfig = this._parsedXml.ObjectLockConfiguration;
-        if (!objectLockConfig || objectLockConfig === '') {
-            validConfig.error = errors.MalformedXML.customizeDescription(
-                'request xml does not include ObjectLockConfiguration');
-            return validConfig;
-        }
-        const objectLockEnabled = objectLockConfig.ObjectLockEnabled;
-        if (!objectLockEnabled || objectLockEnabled[0] !== 'Enabled') {
-            validConfig.error = errors.MalformedXML.customizeDescription(
-                'request xml does not include valid ObjectLockEnabled');
-            return validConfig;
-        }
-        const ruleArray = objectLockConfig.Rule;
-        if (ruleArray) {
-            if (ruleArray.length > 1) {
-                validConfig.error = errors.MalformedXML.customizeDescription(
-                    'request xml contains more than one rule');
-                return validConfig;
-            }
-            const drArray = ruleArray[0].DefaultRetention;
-            if (!drArray || !drArray[0] || drArray[0] === '') {
-                validConfig.error = errors.MalformedXML.customizeDescription(
-                    'Rule request xml does not contain DefaultRetention');
-                return validConfig;
-            }
-            if (!drArray[0].Mode || (!drArray[0].Days && !drArray[0].Years)) {
-                validConfig.error = errors.MalformedXML.customizeDescription(
-                    'DefaultRetention request xml does not contain Mode or ' +
-                    'retention period (Days or Years)');
-                return validConfig;
-            }
-            const validMode = this._parseMode(drArray[0].Mode);
-            if (validMode.error) {
-                validConfig.error = validMode.error;
-                return validConfig;
-            }
-            const validTime = this._parseTime(drArray[0]);
-            if (validTime.error) {
-                validConfig.error = validTime.error;
-                return validConfig;
-            }
-            this._config.rule = {};
-            this._config.rule.mode = validMode.mode;
-            this._config.rule[validTime.timeType] = validTime.timeValue;
-        }
-        return validConfig;
-    }
-
-    /**
-     * Validate the bucket metadata object lock configuration structure and
-     * value types
-     * @param {object} config - The object lock configuration to validate
-     * @return {undefined}
-     */
-    static validateConfig(config) {
-        assert.strictEqual(typeof config, 'object');
-        const rule = config.rule;
-        if (rule) {
-            assert.strictEqual(typeof rule, 'object');
-            assert.strictEqual(typeof rule.mode, 'string');
-            if (rule.days) {
-                assert.strictEqual(typeof rule.days, 'number');
-            } else {
-                assert.strictEqual(typeof rule.years, 'number');
-            }
-        }
-    }
-
-    /**
-     * Get the XML representation of the configuration object
-     * @param {object} config - The bucket object lock configuration
-     * @return {string} - The XML representation of the configuration
-     */
-    static getConfigXML(config) {
-        // object lock is enabled on the bucket but object lock configuration
-        // not set
-        if (config.rule === undefined) {
-            return '<?xml version="1.0" encoding="UTF-8"?>' +
-                '<ObjectLockConfiguration ' +
-                'xmlns="http://s3.amazonaws.com/doc/2006-03-01/">' +
-                '<ObjectLockEnabled>Enabled</ObjectLockEnabled>' +
-                '</ObjectLockConfiguration>';
-        }
-        const { days, years, mode } = config.rule;
-        const Mode = `<Mode>${mode}</Mode>`;
-        const Days = days !== undefined ? `<Days>${days}</Days>` : '';
-        const Years = years !== undefined ? `<Years>${years}</Years>` : '';
-        const Time = Days || Years;
-        return '<?xml version="1.0" encoding="UTF-8"?>' +
-            '<ObjectLockConfiguration ' +
-            'xmlns="http://s3.amazonaws.com/doc/2006-03-01/">' +
-            '<ObjectLockEnabled>Enabled</ObjectLockEnabled>' +
-            '<Rule>' +
-            '<DefaultRetention>' +
-            `${Mode}` +
-            `${Time}` +
-            '</DefaultRetention>' +
-            '</Rule>' +
-            '</ObjectLockConfiguration>';
-    }
-}
-
-module.exports = ObjectLockConfiguration;
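
A happy-path sketch of the deleted parser; the nested-array input shape mirrors what an xml2js-style parser produces, which is an assumption since the caller is not shown here:

    const parsedXml = {
        ObjectLockConfiguration: {
            ObjectLockEnabled: ['Enabled'],
            Rule: [{ DefaultRetention: [{ Mode: ['GOVERNANCE'], Days: ['30'] }] }],
        },
    };
    const config = new ObjectLockConfiguration(parsedXml)
        .getValidatedObjectLockConfiguration();
    // => { rule: { mode: 'GOVERNANCE', days: 30 } }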

@@ -106,12 +106,10 @@ class ObjectMD {
             },
             'key': '',
             'location': null,
-            // versionId, isNull, nullVersionId and isDeleteMarker
-            // should be undefined when not set explicitly
-            'isNull': undefined,
-            'nullVersionId': undefined,
-            'isDeleteMarker': undefined,
-            'versionId': undefined,
+            'isNull': '',
+            'nullVersionId': '',
+            'isDeleteMarker': '',
+            'versionId': undefined, // If no versionId, it should be undefined
             'tags': {},
             'replicationInfo': {
                 status: '',
@@ -607,7 +605,7 @@ class ObjectMD {
      * @return {boolean} Whether new version is null or not
     */
     getIsNull() {
-        return this._data.isNull || false;
+        return this._data.isNull;
     }
 
     /**
@@ -624,7 +622,7 @@ class ObjectMD {
     /**
      * Get metadata nullVersionId value
      *
-     * @return {string|undefined} The version id of the null version
+     * @return {string} The version id of the null version
     */
     getNullVersionId() {
         return this._data.nullVersionId;
@@ -647,7 +645,7 @@ class ObjectMD {
      * @return {boolean} Whether object is a delete marker
     */
     getIsDeleteMarker() {
-        return this._data.isDeleteMarker || false;
+        return this._data.isDeleteMarker;
     }
 
     /**
@@ -664,7 +662,7 @@ class ObjectMD {
     /**
      * Get metadata versionId value
      *
-     * @return {string|undefined} The object versionId
+     * @return {string} The object versionId
     */
     getVersionId() {
         return this._data.versionId;
@@ -674,7 +672,7 @@ class ObjectMD {
      * Get metadata versionId value in encoded form (the one visible
      * to the S3 API user)
      *
-     * @return {string|undefined} The encoded object versionId
+     * @return {string} The encoded object versionId
     */
     getEncodedVersionId() {
         return VersionIDUtils.encode(this.getVersionId());
@@ -883,60 +881,6 @@ class ObjectMD {
         return this;
     }
 
-    /**
-     * Set object legal hold status
-     * @param {boolean} legalHold - true if legal hold is 'ON' false if 'OFF'
-     * @return {ObjectMD} itself
-     */
-    setLegalHold(legalHold) {
-        this._data.legalHold = legalHold || false;
-        return this;
-    }
-
-    /**
-     * Get object legal hold status
-     * @return {boolean} legal hold status
-     */
-    getLegalHold() {
-        return this._data.legalHold || false;
-    }
-
-    /**
-     * Set object retention mode
-     * @param {string} mode - should be one of 'GOVERNANCE', 'COMPLIANCE'
-     * @return {ObjectMD} itself
-     */
-    setRetentionMode(mode) {
-        this._data.retentionMode = mode;
-        return this;
-    }
-
-    /**
-     * Set object retention retain until date
-     * @param {string} date - date in ISO-8601 format
-     * @return {ObjectMD} itself
-     */
-    setRetentionDate(date) {
-        this._data.retentionDate = date;
-        return this;
-    }
-
-    /**
-     * Returns object retention mode
-     * @return {string} retention mode string
-     */
-    getRetentionMode() {
-        return this._data.retentionMode;
-    }
-
-    /**
-     * Returns object retention retain until date
-     * @return {string} retention date string
-     */
-    getRetentionDate() {
-        return this._data.retentionDate;
-    }
-
     /**
      * Returns metadata object
      *
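
The reverted defaults are visible at serialization time: undefined-valued fields drop out of JSON entirely, while empty strings persist:

    console.log(JSON.stringify({ isNull: undefined })); // {}
    console.log(JSON.stringify({ isNull: '' }));        // {"isNull":""}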

@@ -43,7 +43,6 @@ class Server {
         this._address = checkSupportIPv6() ? '::' : '0.0.0.0';
         this._server = null;
         this._logger = logger;
-        this._keepAliveTimeout = null; // null: use default node.js value
     }
 
     /**
@@ -58,19 +57,6 @@ class Server {
         return this;
     }
 
-    /**
-     * Set the keep-alive timeout after which inactive client
-     * connections are automatically closed (default should be
-     * 5 seconds in node.js)
-     *
-     * @param {number} keepAliveTimeout - keep-alive timeout in milliseconds
-     * @return {Server} - returns this
-     */
-    setKeepAliveTimeout(keepAliveTimeout) {
-        this._keepAliveTimeout = keepAliveTimeout;
-        return this;
-    }
-
     /**
      * Getter to access to the http/https server
      *
@@ -415,9 +401,6 @@ class Server {
             this._server = http.createServer(
                 (req, res) => this._onRequest(req, res));
         }
-        if (this._keepAliveTimeout) {
-            this._server.keepAliveTimeout = this._keepAliveTimeout;
-        }
 
         this._server.on('error', err => this._onError(err));
         this._server.on('secureConnection',
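
The removed setter only stashed a value that Node's http.Server exposes directly; the equivalent direct configuration:

    const http = require('http');

    const server = http.createServer((req, res) => res.end('ok'));
    // what setKeepAliveTimeout(60000) arranged at createServer time
    server.keepAliveTimeout = 60000;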

@@ -113,7 +113,7 @@ class TransportTemplate {
 
     _doSend(logger, encodedMessage, cb) {
         const socket = this.socket;
-        if (!socket || socket.destroyed) {
+        if (!socket) {
             const error = new Error('Socket to server not available');
             logger.error('TransportTemplate::_doSend', { error });
             return cb(error);
@@ -138,7 +138,7 @@ class TransportTemplate {
             return this.deferedRequests.push({ encodedMessage, cb });
         }
         assert(encodedMessage.length !== 0);
-        if (this.socket === null || this.socket.destroyed) {
+        if (this.socket === null) {
             return this._createConversation(logger, err => {
                 if (err) {
                     return cb(err);
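
The dropped `socket.destroyed` guard covered the window where a socket object still exists but can no longer be written to; the flag is standard net.Socket API:

    const net = require('net');

    const socket = new net.Socket();
    socket.destroy();
    console.log(socket.destroyed); // true - non-null yet unusable for writes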
|
@@ -54,7 +54,6 @@ const _actionMap = {
    objectPut: 's3:PutObject',
    objectPutACL: 's3:PutObjectAcl',
    objectPutACLVersion: 's3:PutObjectVersionAcl',
    objectPutLegalHold: 's3:PutObjectLegalHold',
    objectPutPart: 's3:PutObject',
    objectPutTagging: 's3:PutObjectTagging',
    objectPutTaggingVersion: 's3:PutObjectVersionTagging',
@@ -97,9 +96,6 @@ const _actionMapIAM = {
    listUsers: 'iam:ListUsers',
    putGroupPolicy: 'iam:PutGroupPolicy',
    removeUserFromGroup: 'iam:RemoveUserFromGroup',
    updateAccessKey: 'iam:UpdateAccessKey',
    updateGroup: 'iam:UpdateGroup',
    updateUser: 'iam:UpdateUser',
};

const _actionMapSSO = {
@@ -293,19 +289,15 @@ class RequestContext {
    /**
     * deSerialize the JSON string
     * @param {string} stringRequest - the stringified requestContext
     * @param {string} resource - individual specificResource
     * @return {object} - parsed string
     */
    static deSerialize(stringRequest, resource) {
    static deSerialize(stringRequest) {
        let obj;
        try {
            obj = JSON.parse(stringRequest);
        } catch (err) {
            return new Error(err);
        }
        if (resource) {
            obj.specificResource = resource;
        }
        return new RequestContext(obj.headers, obj.query, obj.generalResource,
            obj.specificResource, obj.requesterIp, obj.sslEnabled,
            obj.apiMethod, obj.awsService, obj.locationConstraint,
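`deSerialize()` rebuilds a `RequestContext` from its JSON string form, and on malformed input it returns an `Error` instance rather than throwing. A small round-trip sketch — the instance-side `serialize()` method name is an assumption implied by the class, not shown in this hunk:

    const serialized = requestContext.serialize(); // assumed instance method
    // The older signature above also accepts an optional specificResource override.
    const copy = RequestContext.deSerialize(serialized, 'myBucket/myKey');
    if (copy instanceof Error) {
        log.error('could not deserialize request context', { error: copy });
    }
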
@@ -67,7 +67,7 @@ function isResourceApplicable(requestContext, statementResource, log) {
 * @param {Object} log - logger
 * @return {boolean} true if applicable, false if not
 */
evaluators.isActionApplicable = (requestAction, statementAction, log) => {
function isActionApplicable(requestAction, statementAction, log) {
    if (!Array.isArray(statementAction)) {
        // eslint-disable-next-line no-param-reassign
        statementAction = [statementAction];
@@ -89,7 +89,7 @@ evaluators.isActionApplicable = (requestAction, statementAction, log) => {
        { requestAction });
    // If no match found, return false
    return false;
};
}

/**
 * Check whether request meets policy conditions
@@ -209,14 +209,14 @@ evaluators.evaluatePolicy = (requestContext, policy, log) => {
        // If affirmative action is in policy and request action is not
        // applicable, move on to next statement
        if (currentStatement.Action &&
            !evaluators.isActionApplicable(requestContext.getAction(),
            !isActionApplicable(requestContext.getAction(),
                currentStatement.Action, log)) {
            continue;
        }
        // If NotAction is in policy and action matches NotAction in policy,
        // move on to next statement
        if (currentStatement.NotAction &&
            evaluators.isActionApplicable(requestContext.getAction(),
            isActionApplicable(requestContext.getAction(),
                currentStatement.NotAction, log)) {
            continue;
        }
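As the hunks above show, each statement's `Action`/`NotAction` is matched against the request's action before anything else; a non-matching `Action` (or a matching `NotAction`) skips the statement. A hedged sketch of the inputs `evaluators.evaluatePolicy()` consumes — the policy shape follows IAM conventions, and the exact return semantics are not visible in this diff:

    const policy = {
        Version: '2012-10-17',
        Statement: [{
            Effect: 'Allow',
            Action: ['s3:PutObject', 's3:PutObjectTagging'],
            Resource: 'arn:aws:s3:::mybucket/*',
        }],
    };
    // requestContext.getAction() would return e.g. 's3:PutObject';
    // statements whose Action does not apply are skipped in the loop above.
    const verdict = evaluators.evaluatePolicy(requestContext, policy, log);
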
@@ -1,34 +0,0 @@
const ipCheck = require('../ipCheck');

/**
 * getClientIp - Gets the client IP from the request
 * @param {object} request - http request object
 * @param {object} s3config - s3 config
 * @return {string} - returns client IP from the request
 */
function getClientIp(request, s3config) {
    const clientIp = request.socket.remoteAddress;
    const requestConfig = s3config ? s3config.requests : {};
    if (requestConfig && requestConfig.viaProxy) {
        /**
         * if requests are configured to come via proxy,
         * check from config which proxies are to be trusted and
         * which header to be used to extract client IP
         */
        if (ipCheck.ipMatchCidrList(requestConfig.trustedProxyCIDRs,
            clientIp)) {
            const ipFromHeader
            // eslint-disable-next-line operator-linebreak
                = request.headers[requestConfig.extractClientIPFromHeader];
            if (ipFromHeader && ipFromHeader.trim().length) {
                return ipFromHeader.split(',')[0].trim();
            }
        }
    }
    return clientIp;
}

module.exports = {
    getClientIp,
};
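A usage sketch for the removed helper, with a hypothetical proxy configuration built from the fields the code reads (`viaProxy`, `trustedProxyCIDRs`, `extractClientIPFromHeader`); the CIDR and header values here are illustrative only:

    const requestUtils = require('./lib/policyEvaluator/requestUtils'); // file removed above

    const s3config = {
        requests: {
            viaProxy: true,
            trustedProxyCIDRs: ['192.168.100.0/22'], // hypothetical trusted proxies
            extractClientIPFromHeader: 'x-forwarded-for',
        },
    };
    // If the connecting peer is a trusted proxy, the first entry of the
    // configured header wins; otherwise the socket address is returned.
    const clientIp = requestUtils.getClientIp(request, s3config);
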
@@ -25,8 +25,7 @@ function checkArnMatch(policyArn, requestRelativeId, requestArnArr,
    // Check to see if the relative-id matches first since most likely
    // to diverge. If not a match, the resource is not applicable so return
    // false
    if (!policyRelativeIdRegEx.test(caseSensitive ?
        requestRelativeId : requestRelativeId.toLowerCase())) {
    if (!policyRelativeIdRegEx.test(requestRelativeId)) {
        return false;
    }
    // Check the other parts of the ARN to make sure they match. If not,
@@ -1,112 +0,0 @@
const { parseString } = require('xml2js');
const errors = require('../errors');

/*
    Format of the xml request:
    <LegalHold>
        <Status>ON|OFF</Status>
    </LegalHold>
*/

/**
 * @param {string[]} status - legal hold status parsed from xml to be validated
 * @return {Error|object} - legal hold status or error
 */
function _validateStatus(status) {
    const validatedStatus = {};
    const expectedValues = new Set(['OFF', 'ON']);
    if (!status || status[0] === '') {
        validatedStatus.error = errors.MalformedXML.customizeDescription(
            'request xml does not contain Status');
        return validatedStatus;
    }
    if (status.length > 1) {
        validatedStatus.error = errors.MalformedXML.customizeDescription(
            'request xml contains more than one Status');
        return validatedStatus;
    }
    if (!expectedValues.has(status[0])) {
        validatedStatus.error = errors.MalformedXML.customizeDescription(
            'Status request xml must be one of "ON", "OFF"');
        return validatedStatus;
    }
    validatedStatus.status = status[0];
    return validatedStatus;
}

/**
 * validate legal hold - validates legal hold xml
 * @param {object} parsedXml - parsed legal hold xml object
 * @return {object} - object with status or error
 */
function _validateLegalHold(parsedXml) {
    const validatedLegalHold = {};
    if (!parsedXml) {
        validatedLegalHold.error = errors.MalformedXML.customizeDescription(
            'request xml is undefined or empty');
        return validatedLegalHold;
    }
    if (!parsedXml.LegalHold) {
        validatedLegalHold.error = errors.MalformedXML.customizeDescription(
            'request xml does not contain LegalHold');
        return validatedLegalHold;
    }
    const validatedStatus = _validateStatus(parsedXml.LegalHold.Status);
    if (validatedStatus.error) {
        validatedLegalHold.error = validatedStatus.error;
        return validatedLegalHold;
    }
    validatedLegalHold.status = validatedStatus.status;
    return validatedLegalHold;
}

/**
 * parse object legal hold - parse and validate xml body
 * @param {string} xml - xml body to parse and validate
 * @param {object} log - werelogs logger
 * @param {function} cb - callback to server
 * @return {undefined} - calls callback with legal hold status or error
 */
function parseLegalHoldXml(xml, log, cb) {
    parseString(xml, (err, result) => {
        if (err) {
            log.debug('xml parsing failed', {
                error: { message: err.message },
                method: 'parseLegalHoldXml',
                xml,
            });
            return cb(errors.MalformedXML);
        }
        const validatedLegalHold = _validateLegalHold(result);
        const validatedLegalHoldStatus = validatedLegalHold.status === 'ON';
        if (validatedLegalHold.error) {
            log.debug('legal hold validation failed', {
                error: { message: validatedLegalHold.error.message },
                method: 'parseLegalHoldXml',
                xml,
            });
            return cb(validatedLegalHold.error);
        }
        return cb(null, validatedLegalHoldStatus);
    });
}

/**
 * convert to xml - generates legal hold xml
 * @param {(boolean|undefined)} legalHold - true if legal hold is on
 * false if legal hold is off, undefined if legal hold is not set
 * @return {string} - returns legal hold xml
 */
function convertToXml(legalHold) {
    if (!legalHold && legalHold !== false) {
        return '';
    }
    const xml = '<?xml version="1.0" encoding="UTF-8" standalone="yes"?>' +
        `<LegalHold><Status>${legalHold ? 'ON' : 'OFF'}</Status></LegalHold>`;
    return xml;
}

module.exports = {
    convertToXml,
    parseLegalHoldXml,
};
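A short sketch of the removed middleware's round trip; both signatures appear verbatim above, and the callback receives the hold status already reduced to a boolean:

    const { parseLegalHoldXml, convertToXml } =
        require('./lib/s3middleware/objectLegalHold'); // file removed above

    const body = '<LegalHold><Status>ON</Status></LegalHold>';
    parseLegalHoldXml(body, log, (err, isOn) => {
        // isOn === true; malformed bodies yield a MalformedXML error instead
    });
    convertToXml(true);      // '<?xml ...?><LegalHold><Status>ON</Status></LegalHold>'
    convertToXml(undefined); // '' when legal hold was never set
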
@@ -1,156 +0,0 @@
const { parseString } = require('xml2js');

const constants = require('../constants');
const errors = require('../errors');

/*
    Format of xml request:
    <Retention>
        <Mode>COMPLIANCE|GOVERNANCE</Mode>
        <RetainUntilDate>2020-05-20T04:58:45.413000Z</RetainUntilDate>
    </Retention>
*/

/**
 * validateMode - validate retention mode
 * @param {array} mode - parsed xml mode array
 * @return {object} - contains mode or error
 */
function validateMode(mode) {
    const modeObj = {};
    const expectedModes = new Set(['GOVERNANCE', 'COMPLIANCE']);
    if (!mode || !mode[0]) {
        modeObj.error = errors.MalformedXML.customizeDescription(
            'request xml does not contain Mode');
        return modeObj;
    }
    if (mode.length > 1) {
        modeObj.error = errors.MalformedXML.customizeDescription(
            'request xml contains more than one Mode');
        return modeObj;
    }
    if (!expectedModes.has(mode[0])) {
        modeObj.error = errors.MalformedXML.customizeDescription(
            'Mode request xml must be one of "GOVERNANCE", "COMPLIANCE"');
        return modeObj;
    }
    modeObj.mode = mode[0];
    return modeObj;
}

/**
 * validateRetainDate - validate retain until date
 * @param {array} retainDate - parsed xml retention date array
 * @return {object} - contains retain until date or error
 */
function validateRetainDate(retainDate) {
    const dateObj = {};
    if (!retainDate || !retainDate[0]) {
        dateObj.error = errors.MalformedXML.customizeDescription(
            'request xml does not contain RetainUntilDate');
        return dateObj;
    }
    if (!constants.shortIso8601Regex.test(retainDate[0]) &&
        !constants.longIso8601Regex.test(retainDate[0])) {
        dateObj.error = errors.InvalidRequest.customizeDescription(
            'RetainUntilDate timestamp must be ISO-8601 format');
        return dateObj;
    }
    const date = new Date(retainDate[0]);
    if (date < Date.now()) {
        dateObj.error = errors.InvalidRequest.customizeDescription(
            'RetainUntilDate must be in the future');
        return dateObj;
    }
    dateObj.date = retainDate[0];
    return dateObj;
}

/**
 * validate retention - validate retention xml
 * @param {object} parsedXml - parsed retention xml object
 * @return {object} - contains retention information on success,
 * error on failure
 */
function validateRetention(parsedXml) {
    const retentionObj = {};
    if (!parsedXml) {
        retentionObj.error = errors.MalformedXML.customizeDescription(
            'request xml is undefined or empty');
        return retentionObj;
    }
    const retention = parsedXml.Retention;
    if (!retention) {
        retentionObj.error = errors.MalformedXML.customizeDescription(
            'request xml does not contain Retention');
        return retentionObj;
    }
    const modeObj = validateMode(retention.Mode);
    if (modeObj.error) {
        retentionObj.error = modeObj.error;
        return retentionObj;
    }
    const dateObj = validateRetainDate(retention.RetainUntilDate);
    if (dateObj.error) {
        retentionObj.error = dateObj.error;
        return retentionObj;
    }
    retentionObj.mode = modeObj.mode;
    retentionObj.date = dateObj.date;
    return retentionObj;
}

/**
 * parseRetentionXml - Parse and validate xml body, returning callback with
 * object retentionObj: { mode: <value>, date: <value> }
 * @param {string} xml - xml body to parse and validate
 * @param {object} log - Werelogs logger
 * @param {function} cb - callback to server
 * @return {undefined} - calls callback with object retention or error
 */
function parseRetentionXml(xml, log, cb) {
    parseString(xml, (err, result) => {
        if (err) {
            log.trace('xml parsing failed', {
                error: err,
                method: 'parseRetentionXml',
            });
            log.debug('invalid xml', { xml });
            return cb(errors.MalformedXML);
        }
        const retentionObj = validateRetention(result);
        if (retentionObj.error) {
            log.debug('retention validation failed', {
                error: retentionObj.error,
                method: 'validateRetention',
                xml,
            });
            return cb(retentionObj.error);
        }
        return cb(null, retentionObj);
    });
}

/**
 * convertToXml - Convert retention info object to xml
 * @param {string} mode - retention mode
 * @param {string} date - retention retain until date
 * @return {string} - returns retention information xml string
 */
function convertToXml(mode, date) {
    const xml = [];
    xml.push('<Retention xmlns="http://s3.amazonaws.com/doc/2006-03-01/">');
    if (mode && date) {
        xml.push(`<Mode>${mode}</Mode>`);
        xml.push(`<RetainUntilDate>${date}</RetainUntilDate>`);
    } else {
        return '';
    }
    xml.push('</Retention>');
    return xml.join('');
}

module.exports = {
    parseRetentionXml,
    convertToXml,
};
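Companion sketch for the removed retention middleware; the date below is illustrative (it must be a future ISO-8601 timestamp, per the validators above):

    const { parseRetentionXml, convertToXml } =
        require('./lib/s3middleware/objectRetention'); // file removed above

    const body = '<Retention>' +
        '<Mode>GOVERNANCE</Mode>' +
        '<RetainUntilDate>2099-01-01T00:00:00.000Z</RetainUntilDate>' +
        '</Retention>';
    parseRetentionXml(body, log, (err, retention) => {
        // retention => { mode: 'GOVERNANCE', date: '2099-01-01T00:00:00.000Z' };
        // a past RetainUntilDate or non ISO-8601 timestamp yields InvalidRequest
    });
    convertToXml('GOVERNANCE', '2099-01-01T00:00:00.000Z'); // inverse direction
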
@@ -3,6 +3,8 @@ const { parseString } = require('xml2js');
const errors = require('../errors');
const escapeForXml = require('./escapeForXml');

const tagRegex = new RegExp(/[^a-zA-Z0-9 +-=._:/]/g);

const errorInvalidArgument = errors.InvalidArgument
    .customizeDescription('The header \'x-amz-tagging\' shall be ' +
    'encoded as UTF-8 then URLEncoded URL query parameters without ' +
@@ -46,13 +48,13 @@ const _validator = {
    ),

    validateKeyValue: (key, value) => {
        if (key.length > 128) {
        if (key.length > 128 || key.match(tagRegex)) {
            return errors.InvalidTag.customizeDescription('The TagKey you ' +
                'have provided is too long, max 128');
                'have provided is invalid');
        }
        if (value.length > 256) {
        if (value.length > 256 || value.match(tagRegex)) {
            return errors.InvalidTag.customizeDescription('The TagValue you ' +
                'have provided is too long, max 256');
                'have provided is invalid');
        }
        return true;
    },
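The `tagRegex` added above matches any character outside the allowed S3 tag character set, so a match means rejection. A quick sketch of what the length-plus-charset check now accepts (the validator itself is module-internal, so this only exercises the regex shown above):

    const tagRegex = new RegExp(/[^a-zA-Z0-9 +-=._:/]/g);

    'environment:prod-1'.match(tagRegex); // null   -> valid tag text
    'bad#key'.match(tagRegex);            // ['#']  -> rejected as InvalidTag
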
@@ -51,11 +51,11 @@ function routerGET(request, response, api, log, statsClient, dataRetrievalFn) {
            });
        } else if (request.query.lifecycle !== undefined) {
            api.callApiMethod('bucketGetLifecycle', request, response, log,
            (err, xml, corsHeaders) => {
                routesUtils.statsReport500(err, statsClient);
                routesUtils.responseXMLBody(err, xml, response, log,
                    corsHeaders);
            });
                (err, xml, corsHeaders) => {
                    routesUtils.statsReport500(err, statsClient);
                    routesUtils.responseXMLBody(err, xml, response, log,
                        corsHeaders);
                });
        } else if (request.query.uploads !== undefined) {
            // List MultipartUploads
            api.callApiMethod('listMultipartUploads', request, response, log,
@@ -73,16 +73,9 @@ function routerGET(request, response, api, log, statsClient, dataRetrievalFn) {
                });
        } else if (request.query.policy !== undefined) {
            api.callApiMethod('bucketGetPolicy', request, response, log,
                (err, xml, corsHeaders) => {
                (err, json, corsHeaders) => {
                    routesUtils.statsReport500(err, statsClient);
                    return routesUtils.responseXMLBody(err, xml, response,
                        log, corsHeaders);
                });
        } else if (request.query['object-lock'] !== undefined) {
            api.callApiMethod('bucketGetObjectLock', request, response, log,
                (err, xml, corsHeaders) => {
                    routesUtils.statsReport500(err, statsClient);
                    return routesUtils.responseXMLBody(err, xml, response,
                    return routesUtils.responseJSONBody(err, json, response,
                        log, corsHeaders);
                });
        } else {
@@ -95,6 +88,7 @@ function routerGET(request, response, api, log, statsClient, dataRetrievalFn) {
            });
        }
    } else {
        /* eslint-disable no-lonely-if */
        if (request.query.acl !== undefined) {
            // GET object ACL
            api.callApiMethod('objectGetACL', request, response, log,
@@ -103,14 +97,8 @@ function routerGET(request, response, api, log, statsClient, dataRetrievalFn) {
                    return routesUtils.responseXMLBody(err, xml, response, log,
                        corsHeaders);
                });
        } else if (request.query['legal-hold'] !== undefined) {
            api.callApiMethod('objectGetLegalHold', request, response, log,
                (err, xml, corsHeaders) => {
                    routesUtils.statsReport500(err, statsClient);
                    return routesUtils.responseXMLBody(err, xml, response, log,
                        corsHeaders);
                });
        } else if (request.query.tagging !== undefined) {
            // GET object Tagging
            api.callApiMethod('objectGetTagging', request, response, log,
                (err, xml, corsHeaders) => {
                    routesUtils.statsReport500(err, statsClient);
@@ -125,13 +113,6 @@ function routerGET(request, response, api, log, statsClient, dataRetrievalFn) {
                    return routesUtils.responseXMLBody(err, xml, response, log,
                        corsHeaders);
                });
        } else if (request.query.retention !== undefined) {
            api.callApiMethod('objectGetRetention', request, response, log,
                (err, xml, corsHeaders) => {
                    routesUtils.statsReport500(err, statsClient);
                    return routesUtils.responseXMLBody(err, xml, response, log,
                        corsHeaders);
                });
        } else {
            // GET object
            api.callApiMethod('objectGet', request, response, log,
@@ -147,6 +128,7 @@ function routerGET(request, response, api, log, statsClient, dataRetrievalFn) {
                        range, log);
                });
        }
        /* eslint-enable */
    }
}

@@ -16,6 +16,7 @@ function routePUT(request, response, api, log, statsClient) {
            return routesUtils.responseNoBody(
                errors.BadRequest, null, response, null, log);
        }

        // PUT bucket ACL
        if (request.query.acl !== undefined) {
            api.callApiMethod('bucketPutACL', request, response, log,
@@ -66,13 +67,6 @@ function routePUT(request, response, api, log, statsClient) {
                    routesUtils.responseNoBody(err, corsHeaders, response, 200,
                        log);
                });
        } else if (request.query['object-lock'] !== undefined) {
            api.callApiMethod('bucketPutObjectLock', request, response, log,
                (err, corsHeaders) => {
                    routesUtils.statsReport500(err, statsClient);
                    routesUtils.responseNoBody(err, corsHeaders, response, 200,
                        log);
                });
        } else {
            // PUT bucket
            return api.callApiMethod('bucketPut', request, response, log,
@@ -86,8 +80,8 @@ function routePUT(request, response, api, log, statsClient) {
            });
        }
    } else {
        // PUT object, PUT object ACL, PUT object multipart,
        // PUT object copy or PUT object legal hold
        // PUT object, PUT object ACL, PUT object multipart or
        // PUT object copy
        // if content-md5 is not present in the headers, try to
        // parse content-md5 from meta headers

@@ -145,13 +139,6 @@ function routePUT(request, response, api, log, statsClient) {
                    return routesUtils.responseNoBody(err, resHeaders,
                        response, 200, log);
                });
        } else if (request.query['legal-hold'] !== undefined) {
            api.callApiMethod('objectPutLegalHold', request, response, log,
                (err, resHeaders) => {
                    routesUtils.statsReport500(err, statsClient);
                    return routesUtils.responseNoBody(err, resHeaders,
                        response, 200, log);
                });
        } else if (request.query.tagging !== undefined) {
            api.callApiMethod('objectPutTagging', request, response, log,
                (err, resHeaders) => {
@@ -159,13 +146,6 @@ function routePUT(request, response, api, log, statsClient) {
                    return routesUtils.responseNoBody(err, resHeaders,
                        response, 200, log);
                });
        } else if (request.query.retention !== undefined) {
            api.callApiMethod('objectPutRetention', request, response, log,
                (err, resHeaders) => {
                    routesUtils.statsReport500(err, statsClient);
                    return routesUtils.responseNoBody(err, resHeaders,
                        response, 200, log);
                });
        } else if (request.headers['x-amz-copy-source']) {
            return api.callApiMethod('objectCopy', request, response, log,
                (err, xml, additionalHeaders) => {
@@ -187,6 +167,7 @@ function routePUT(request, response, api, log, statsClient) {
            log.end().addDefaultFields({
                contentLength: request.parsedContentLength,
            });

            api.callApiMethod('objectPut', request, response, log,
                (err, resHeaders) => {
                    routesUtils.statsReport500(err, statsClient);
@@ -884,7 +884,7 @@ const routesUtils = {
        }
        const ipAddressRegex = new RegExp(/^(\d+\.){3}\d+$/);
        // eslint-disable-next-line no-useless-escape
        const dnsRegex = new RegExp(/^[a-z0-9]+((\.|\-+)[a-z0-9]+)*$/);
        const dnsRegex = new RegExp(/^[a-z0-9]+([\.\-]{1}[a-z0-9]+)*$/);
        // Must be at least 3 and no more than 63 characters long.
        if (bucketname.length < 3 || bucketname.length > 63) {
            return false;
@@ -1,38 +0,0 @@
const errors = require('../errors');

/**
 * read a JSON object from a stream returned as a javascript object,
 * handle errors.
 *
 * @param {stream.Readable} s - Readable stream
 * @param {@hapi/joi} [joiSchema] - optional validation schema for the JSON object
 * @return {Promise} a Promise resolved with the parsed JSON object as a result
 */
async function readJSONStreamObject(s, joiSchema) {
    return new Promise((resolve, reject) => {
        const contentsChunks = [];
        s.on('data', chunk => {
            contentsChunks.push(chunk);
        });
        s.on('end', () => {
            const contents = contentsChunks.join('');
            try {
                const parsedContents = JSON.parse(contents);
                if (joiSchema) {
                    const { error, value } = joiSchema.validate(parsedContents);
                    if (error) {
                        throw error;
                    }
                    return resolve(value);
                }
                return resolve(parsedContents);
            } catch (err) {
                return reject(errors.InvalidArgument.customizeDescription(
                    `invalid input: ${err.message}`));
            }
        });
        s.once('error', reject);
    });
}

module.exports = readJSONStreamObject;
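Usage sketch for the removed stream helper; the joi schema argument is optional, as the doc comment says, and `stream` here stands for any Readable producing a JSON document (e.g. a request body):

    const joi = require('@hapi/joi');
    const readJSONStreamObject = require('./lib/stream/readJSONStreamObject'); // file removed above

    const schema = joi.object({ name: joi.string().required() });
    readJSONStreamObject(stream, schema)
        .then(obj => log.info('parsed', { obj }))
        // InvalidArgument on JSON parse or schema validation failure:
        .catch(err => log.error('bad input', { error: err }));
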
@@ -2,16 +2,4 @@ module.exports.VersioningConstants = {
    VersionId: {
        Separator: '\0',
    },
    DbPrefixes: {
        Master: '\x7fM',
        Version: '\x7fV',
    },
    BucketVersioningKeyFormat: {
        current: 'v1',
        v0: 'v0',
        v0mig: 'v0mig',
        v0v1: 'v0v1',
        v1mig: 'v1mig',
        v1: 'v1',
    },
};
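The removed `DbPrefixes` constants are what the v1 key format prepends to metadata keys; a one-liner showing the mapping that the listing tests below rely on:

    const { DbPrefixes } = require('./lib/versioning/constants').VersioningConstants;

    // v0 stores 'foo/bar' as-is; v1 prefixes master and version keys:
    const masterKey = `${DbPrefixes.Master}foo/bar`;   // '\x7fMfoo/bar'
    const versionKey = `${DbPrefixes.Version}foo/bar`; // '\x7fVfoo/bar'
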
@@ -3,7 +3,7 @@
  "engines": {
    "node": ">=6.9.5"
  },
  "version": "7.5.0",
  "version": "7.4.3",
  "description": "Common utilities for the S3 project components",
  "main": "index.js",
  "repository": {
@@ -43,10 +43,9 @@
    "eslint": "2.13.1",
    "eslint-plugin-react": "^4.3.0",
    "eslint-config-airbnb": "6.2.0",
    "eslint-config-scality": "scality/Guidelines#ec33dfb",
    "eslint-config-scality": "scality/Guidelines#71a059ad",
    "lolex": "1.5.2",
    "mocha": "2.5.3",
    "sinon": "^9.0.2",
    "temp": "0.8.3"
  },
  "scripts": {
@@ -1,123 +0,0 @@
const assert = require('assert');

const LRUCache = require('../../../../lib/algos/cache/LRUCache');

describe('LRUCache', () => {
    it('max 1 entry', () => {
        const lru = new LRUCache(1);
        assert.strictEqual(lru.count(), 0);

        assert.strictEqual(lru.add('a', 1), false);
        assert.strictEqual(lru.add('b', 2), false);
        assert.strictEqual(lru.add('b', 3), true);
        assert.strictEqual(lru.count(), 1);

        assert.strictEqual(lru.get('b'), 3);
        // a has been evicted when b was inserted
        assert.strictEqual(lru.get('a'), undefined);

        assert.strictEqual(lru.remove('a'), false);
        assert.strictEqual(lru.remove('b'), true);
        assert.strictEqual(lru.remove('c'), false);
        assert.strictEqual(lru.remove('b'), false);
        assert.strictEqual(lru.count(), 0);
        assert.strictEqual(lru.get('b'), undefined);
    });

    it('max 3 entries', () => {
        const lru = new LRUCache(3);

        assert.strictEqual(lru.add('a', 1), false);
        assert.strictEqual(lru.add('b', 2), false);
        assert.strictEqual(lru.add('b', 3), true);
        assert.strictEqual(lru.count(), 2);

        assert.strictEqual(lru.get('b'), 3);
        assert.strictEqual(lru.get('a'), 1);
        assert.strictEqual(lru.add('c', 4), false);
        assert.strictEqual(lru.count(), 3);

        assert.strictEqual(lru.get('b'), 3);

        // a is the least recently accessed item at the time of
        // insertion of d, so will be evicted first
        assert.strictEqual(lru.add('d', 5), false);
        assert.strictEqual(lru.get('a'), undefined);
        assert.strictEqual(lru.get('b'), 3);
        assert.strictEqual(lru.get('c'), 4);
        assert.strictEqual(lru.get('d'), 5);

        assert.strictEqual(lru.remove('d'), true);
        assert.strictEqual(lru.remove('c'), true);
        assert.strictEqual(lru.count(), 1);
        assert.strictEqual(lru.remove('b'), true);
        assert.strictEqual(lru.count(), 0);
    });

    it('max 1000 entries', () => {
        const lru = new LRUCache(1000);

        for (let i = 0; i < 1000; ++i) {
            assert.strictEqual(lru.add(`${i}`, i), false);
        }
        assert.strictEqual(lru.count(), 1000);
        for (let i = 0; i < 1000; ++i) {
            assert.strictEqual(lru.get(`${i}`), i);
        }
        for (let i = 999; i >= 0; --i) {
            assert.strictEqual(lru.get(`${i}`), i);
        }
        // this shall evict the least recently accessed items, which
        // are in the range [500..1000)
        for (let i = 1000; i < 1500; ++i) {
            assert.strictEqual(lru.add(`${i}`, i), false);
        }
        for (let i = 0; i < 500; ++i) {
            assert.strictEqual(lru.get(`${i}`), i);
        }
        // check evicted items
        for (let i = 500; i < 1000; ++i) {
            assert.strictEqual(lru.get(`${i}`), undefined);
        }

        lru.clear();
        assert.strictEqual(lru.count(), 0);
        assert.strictEqual(lru.get(100), undefined);
    });

    it('max 1000000 entries', function lru1M() {
        // this test takes ~1-2 seconds on a laptop, nevertheless set a
        // large timeout to reduce the potential of flakiness on possibly
        // slower CI environment.
        this.timeout(30000);

        const lru = new LRUCache(1000000);

        for (let i = 0; i < 1000000; ++i) {
            assert.strictEqual(lru.add(`${i}`, i), false);
        }
        assert.strictEqual(lru.count(), 1000000);
        // access all even-numbered items to make them the most
        // recently accessed
        for (let i = 0; i < 1000000; i += 2) {
            assert.strictEqual(lru.get(`${i}`), i);
        }
        // this shall evict the 500K least recently accessed items,
        // which are all odd-numbered items
        for (let i = 1000000; i < 1500000; ++i) {
            assert.strictEqual(lru.add(`${i}`, i), false);
        }
        assert.strictEqual(lru.count(), 1000000);
        // check present (even) and evicted (odd) items
        for (let i = 0; i < 1000000; ++i) {
            assert.strictEqual(lru.get(`${i}`),
                i % 2 === 0 ? i : undefined);
            assert.strictEqual(lru.remove(`${i}`), i % 2 === 0);
        }
        assert.strictEqual(lru.count(), 500000);
        for (let i = 1499999; i >= 1000000; --i) {
            assert.strictEqual(lru.remove(`${i}`), true);
        }
        assert.strictEqual(lru.count(), 0);
    });
});
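Condensed from the assertions in the deleted test file, the `LRUCache` surface behaves like this:

    const LRUCache = require('./lib/algos/cache/LRUCache'); // file removed above

    const lru = new LRUCache(2); // capacity of 2 entries
    lru.add('a', 1);             // returns false: key was not present before
    lru.add('a', 2);             // returns true: existing key updated
    lru.get('a');                // 2
    lru.add('b', 3);
    lru.add('c', 4);             // evicts 'a', the least recently used entry
    lru.get('a');                // undefined
    lru.remove('b');             // true; lru.count() is now 1
    lru.clear();                 // empties the cache
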
@@ -7,86 +7,80 @@ const werelogs = require('werelogs').Logger;
// eslint-disable-next-line new-cap
const logger = new werelogs('listMpuTest');
const performListing = require('../../../utils/performListing');
const VSConst = require('../../../../lib/versioning/constants').VersioningConstants;
const { DbPrefixes } = VSConst;

describe('Multipart Uploads listing algorithm', () => {
    const splitter = '**';
    const overviewPrefix = `overview${splitter}`;
    const storageClass = 'STANDARD';
    const initiator1 = { ID: '1', DisplayName: 'initiator1' };
    const initiator2 = { ID: '2', DisplayName: 'initiator2' };
    const keys = {
        v0: [`${overviewPrefix}test/1${splitter}uploadId1`,
            `${overviewPrefix}test/2${splitter}uploadId2`,
            `${overviewPrefix}test/3${splitter}uploadId3`,
            `${overviewPrefix}testMore/4${splitter}uploadId4`,
            `${overviewPrefix}testMore/5${splitter}uploadId5`,
            `${overviewPrefix}prefixTest/5${splitter}uploadId5`,
        ],
        v1: [`${DbPrefixes.Master}${overviewPrefix}test/1${splitter}uploadId1`,
            `${DbPrefixes.Master}${overviewPrefix}test/2${splitter}uploadId2`,
            `${DbPrefixes.Master}${overviewPrefix}test/3${splitter}uploadId3`,
            `${DbPrefixes.Master}${overviewPrefix}testMore/4${splitter}uploadId4`,
            `${DbPrefixes.Master}${overviewPrefix}testMore/5${splitter}uploadId5`,
            `${DbPrefixes.Master}${overviewPrefix}prefixTest/5${splitter}uploadId5`,
        ],
    };
    const values = [
        JSON.stringify({
            'key': 'test/1',
            'uploadId': 'uploadId1',
            'initiator': initiator1,
            'owner-id': '1',
            'owner-display-name': 'owner1',
            'x-amz-storage-class': storageClass,
            'initiated': '',
        }),
        JSON.stringify({
            'key': 'test/2',
            'uploadId': 'uploadId2',
            'initiator': initiator2,
            'owner-id': '1',
            'owner-display-name': 'owner2',
            'x-amz-storage-class': storageClass,
            'initiated': '',
        }),
        JSON.stringify({
            'key': 'test/3',
            'uploadId': 'uploadId3',
            'initiator': initiator1,
            'owner-id': '1',
            'owner-display-name': 'owner1',
            'x-amz-storage-class': storageClass,
            'initiated': '',
        }),
        JSON.stringify({
            'key': 'testMore/4',
            'uploadId': 'uploadId4',
            'initiator': initiator2,
            'owner-id': '1',
            'owner-display-name': 'owner2',
            'x-amz-storage-class': storageClass,
            'initiated': '',
        }),
        JSON.stringify({
            'key': 'testMore/5',
            'uploadId': 'uploadId5',
            'initiator': initiator1,
            'owner-id': '1',
            'owner-display-name': 'owner1',
            'x-amz-storage-class': storageClass,
            'initiated': '',
        }),
        JSON.stringify({
            'key': 'prefixTest/5',
            'uploadId': 'uploadId5',
            'initiator': initiator1,
            'owner-id': '1',
            'owner-display-name': 'owner1',
            'x-amz-storage-class': storageClass,
            'initiated': '',
        }),
    const keys = [
        {
            key: `${overviewPrefix}test/1${splitter}uploadId1`,
            value: JSON.stringify({
                'key': 'test/1',
                'uploadId': 'uploadId1',
                'initiator': initiator1,
                'owner-id': '1',
                'owner-display-name': 'owner1',
                'x-amz-storage-class': storageClass,
                'initiated': '',
            }),
        }, {
            key: `${overviewPrefix}test/2${splitter}uploadId2`,
            value: JSON.stringify({
                'key': 'test/2',
                'uploadId': 'uploadId2',
                'initiator': initiator2,
                'owner-id': '1',
                'owner-display-name': 'owner2',
                'x-amz-storage-class': storageClass,
                'initiated': '',
            }),
        }, {
            key: `${overviewPrefix}test/3${splitter}uploadId3`,
            value: JSON.stringify({
                'key': 'test/3',
                'uploadId': 'uploadId3',
                'initiator': initiator1,
                'owner-id': '1',
                'owner-display-name': 'owner1',
                'x-amz-storage-class': storageClass,
                'initiated': '',
            }),
        }, {
            key: `${overviewPrefix}testMore/4${splitter}uploadId4`,
            value: JSON.stringify({
                'key': 'testMore/4',
                'uploadId': 'uploadId4',
                'initiator': initiator2,
                'owner-id': '1',
                'owner-display-name': 'owner2',
                'x-amz-storage-class': storageClass,
                'initiated': '',
            }),
        }, {
            key: `${overviewPrefix}testMore/5${splitter}uploadId5`,
            value: JSON.stringify({
                'key': 'testMore/5',
                'uploadId': 'uploadId5',
                'initiator': initiator1,
                'owner-id': '1',
                'owner-display-name': 'owner1',
                'x-amz-storage-class': storageClass,
                'initiated': '',
            }),
        }, {
            key: `${overviewPrefix}prefixTest/5${splitter}uploadId5`,
            value: JSON.stringify({
                'key': 'prefixTest/5',
                'uploadId': 'uploadId5',
                'initiator': initiator1,
                'owner-id': '1',
                'owner-display-name': 'owner1',
                'x-amz-storage-class': storageClass,
                'initiated': '',
            }),
        },
    ];
    let listingParams;
    let expectedResult;
@@ -109,8 +103,8 @@ describe('Multipart Uploads listing algorithm', () => {
            NextUploadIdMarker: 'uploadId5',
        };

        expectedResult.Uploads = values.map(value => {
            const tmp = JSON.parse(value);
        expectedResult.Uploads = keys.map(obj => {
            const tmp = JSON.parse(obj.value);
            return {
                key: tmp.key,
                value: {
@@ -128,47 +122,44 @@ describe('Multipart Uploads listing algorithm', () => {
        done();
    });

    ['v0', 'v1'].forEach(vFormat => {
        const dbListing = keys[vFormat].map((key, i) => ({
            key,
            value: values[i],
        }));
        it(`should perform a vFormat=${vFormat} listing of all keys`, () => {
            const listingResult = performListing(dbListing, MultipartUploads,
                listingParams, logger, vFormat);
            assert.deepStrictEqual(listingResult, expectedResult);
        });
    it('should perform a listing of all keys', done => {
        const listingResult = performListing(keys, MultipartUploads,
            listingParams, logger);
        assert.deepStrictEqual(listingResult, expectedResult);
        done();
    });

        it(`should perform a vFormat=${vFormat} listing with delimiter`, () => {
            const delimiter = '/';
            listingParams.delimiter = delimiter;
            // format result
            expectedResult.Uploads = [];
            expectedResult.CommonPrefixes = ['test/', 'testMore/', 'prefixTest/'];
            expectedResult.Delimiter = delimiter;
            expectedResult.MaxKeys = 1000;
            expectedResult.NextKeyMarker = 'prefixTest/';
            expectedResult.NextUploadIdMarker = '';
    it('should perform a listing with delimiter', done => {
        const delimiter = '/';
        listingParams.delimiter = delimiter;
        // format result
        expectedResult.Uploads = [];
        expectedResult.CommonPrefixes = ['test/', 'testMore/', 'prefixTest/'];
        expectedResult.Delimiter = delimiter;
        expectedResult.MaxKeys = 1000;
        expectedResult.NextKeyMarker = 'prefixTest/';
        expectedResult.NextUploadIdMarker = '';

            const listingResult = performListing(dbListing, MultipartUploads,
                listingParams, logger, vFormat);
            assert.deepStrictEqual(listingResult, expectedResult);
        });
        const listingResult = performListing(keys, MultipartUploads,
            listingParams, logger);
        assert.deepStrictEqual(listingResult, expectedResult);
        done();
    });

        it(`should perform a vFormat=${vFormat} listing with max keys`, () => {
            listingParams.maxKeys = 3;
            // format result
            expectedResult.Uploads.pop();
            expectedResult.Uploads.pop();
            expectedResult.Uploads.pop();
            expectedResult.NextKeyMarker = 'test/3';
            expectedResult.NextUploadIdMarker = 'uploadId3';
            expectedResult.IsTruncated = true;
            expectedResult.MaxKeys = 3;
    it('should perform a listing with max keys', done => {
        listingParams.maxKeys = 3;
        // format result
        expectedResult.Uploads.pop();
        expectedResult.Uploads.pop();
        expectedResult.Uploads.pop();
        expectedResult.NextKeyMarker = 'test/3';
        expectedResult.NextUploadIdMarker = 'uploadId3';
        expectedResult.IsTruncated = true;
        expectedResult.MaxKeys = 3;

            const listingResult = performListing(dbListing, MultipartUploads,
                listingParams, logger, vFormat);
            assert.deepStrictEqual(listingResult, expectedResult);
        });
        const listingResult = performListing(keys, MultipartUploads,
            listingParams, logger);
        assert.deepStrictEqual(listingResult, expectedResult);
        done();
    });
});
@@ -9,15 +9,11 @@ const Werelogs = require('werelogs').Logger;
const logger = new Werelogs('listTest');
const performListing = require('../../../utils/performListing');
const zpad = require('../../helpers').zpad;
const { inc } = require('../../../../lib/algos/list/tools');
const VSConst = require('../../../../lib/versioning/constants').VersioningConstants;
const { DbPrefixes } = VSConst;

class Test {
    constructor(name, input, genMDParams, output, filter) {
    constructor(name, input, output, filter) {
        this.name = name;
        this.input = input;
        this.genMDParams = genMDParams;
        this.output = output;
        this.filter = filter || this._defaultFilter;
    }
@@ -42,7 +38,6 @@ const data = [
    { key: 'notes/yore.rs', value },
    { key: 'notes/zaphod/Beeblebrox.txt', value },
];

const dataVersioned = [
    { key: 'Pâtisserie=中文-español-English', value },
    { key: 'Pâtisserie=中文-español-English\0bar', value },
@@ -90,12 +85,6 @@ const receivedNonAlphaData = nonAlphabeticalData.map(

const tests = [
    new Test('all elements', {}, {
        v0: {},
        v1: {
            gte: DbPrefixes.Master,
            lt: inc(DbPrefixes.Master),
        },
    }, {
        Contents: receivedData,
        CommonPrefixes: [],
        Delimiter: undefined,
@@ -104,14 +93,6 @@ const tests = [
    }),
    new Test('with valid marker', {
        marker: receivedData[4].key,
    }, {
        v0: {
            gt: receivedData[4].key,
        },
        v1: {
            gt: `${DbPrefixes.Master}${receivedData[4].key}`,
            lt: inc(DbPrefixes.Master),
        },
    }, {
        Contents: [
            receivedData[5],
@@ -128,14 +109,6 @@ const tests = [
    new Test('with bad marker', {
        marker: 'zzzz',
        delimiter: '/',
    }, {
        v0: {
            gt: 'zzzz',
        },
        v1: {
            gt: `${DbPrefixes.Master}zzzz`,
            lt: inc(DbPrefixes.Master),
        },
    }, {
        Contents: [],
        CommonPrefixes: [],
@@ -145,12 +118,6 @@ const tests = [
    }, (e, input) => e.key > input.marker),
    new Test('with makKeys', {
        maxKeys: 3,
    }, {
        v0: {},
        v1: {
            gte: DbPrefixes.Master,
            lt: inc(DbPrefixes.Master),
        },
    }, {
        Contents: receivedData.slice(0, 3),
        CommonPrefixes: [],
@@ -160,12 +127,6 @@ const tests = [
    }),
    new Test('with big makKeys', {
        maxKeys: 15000,
    }, {
        v0: {},
        v1: {
            gte: DbPrefixes.Master,
            lt: inc(DbPrefixes.Master),
        },
    }, {
        Contents: receivedData,
        CommonPrefixes: [],
@@ -175,12 +136,6 @@ const tests = [
    }),
    new Test('with delimiter', {
        delimiter: '/',
    }, {
        v0: {},
        v1: {
            gte: DbPrefixes.Master,
            lt: inc(DbPrefixes.Master),
        },
    }, {
        Contents: [
            receivedData[0],
@@ -192,12 +147,6 @@ const tests = [
    }),
    new Test('with long delimiter', {
        delimiter: 'notes/summer',
    }, {
        v0: {},
        v1: {
            gte: DbPrefixes.Master,
            lt: inc(DbPrefixes.Master),
        },
    }, {
        Contents: [
            receivedData[0],
@@ -217,15 +166,6 @@ const tests = [
        delimiter: '/',
        prefix: 'notes/summer/',
        marker: 'notes/summer0',
    }, {
        v0: {
            gt: `notes/summer${inc('/')}`,
            lt: `notes/summer${inc('/')}`,
        },
        v1: {
            gt: `${DbPrefixes.Master}notes/summer${inc('/')}`,
            lt: `${DbPrefixes.Master}notes/summer${inc('/')}`,
        },
    }, {
        Contents: [],
        CommonPrefixes: [],
@@ -236,15 +176,6 @@ const tests = [
    new Test('delimiter and prefix (related to #147)', {
        delimiter: '/',
        prefix: 'notes/',
    }, {
        v0: {
            gte: 'notes/',
            lt: `notes${inc('/')}`,
        },
        v1: {
            gte: `${DbPrefixes.Master}notes/`,
            lt: `${DbPrefixes.Master}notes${inc('/')}`,
        },
    }, {
        Contents: [
            receivedData[7],
@@ -263,15 +194,6 @@ const tests = [
        delimiter: '/',
        prefix: 'notes/',
        marker: 'notes/year.txt',
    }, {
        v0: {
            gt: 'notes/year.txt',
            lt: `notes${inc('/')}`,
        },
        v1: {
            gt: `${DbPrefixes.Master}notes/year.txt`,
            lt: `${DbPrefixes.Master}notes${inc('/')}`,
        },
    }, {
        Contents: [
            receivedData[8],
@@ -288,15 +210,6 @@ const tests = [
        prefix: 'notes/',
        marker: 'notes/',
        maxKeys: 1,
    }, {
        v0: {
            gt: 'notes/',
            lt: `notes${inc('/')}`,
        },
        v1: {
            gt: `${DbPrefixes.Master}notes/`,
            lt: `${DbPrefixes.Master}notes${inc('/')}`,
        },
    }, {
        Contents: [],
        CommonPrefixes: ['notes/spring/'],
@@ -310,15 +223,6 @@ const tests = [
        prefix: 'notes/', // prefix
        marker: 'notes/spring/',
        maxKeys: 1,
    }, {
        v0: {
            gt: 'notes/spring/',
            lt: `notes${inc('/')}`,
        },
        v1: {
            gt: `${DbPrefixes.Master}notes/spring/`,
            lt: `${DbPrefixes.Master}notes${inc('/')}`,
        },
    }, {
        Contents: [],
        CommonPrefixes: ['notes/summer/'],
@@ -332,15 +236,6 @@ const tests = [
        prefix: 'notes/', // prefix
        marker: 'notes/summer/',
        maxKeys: 1,
    }, {
        v0: {
            gt: 'notes/summer/',
            lt: `notes${inc('/')}`,
        },
        v1: {
            gt: `${DbPrefixes.Master}notes/summer/`,
            lt: `${DbPrefixes.Master}notes${inc('/')}`,
        },
    }, {
        Contents: [
            receivedData[7],
@@ -356,15 +251,6 @@ const tests = [
        prefix: 'notes/', // prefix
        marker: 'notes/year.txt',
        maxKeys: 1,
    }, {
        v0: {
            gt: 'notes/year.txt',
            lt: `notes${inc('/')}`,
        },
        v1: {
            gt: `${DbPrefixes.Master}notes/year.txt`,
            lt: `${DbPrefixes.Master}notes${inc('/')}`,
        },
    }, {
        Contents: [
            receivedData[8],
@@ -380,15 +266,6 @@ const tests = [
        prefix: 'notes/',
        marker: 'notes/yore.rs',
        maxKeys: 1,
    }, {
        v0: {
            gt: 'notes/yore.rs',
            lt: `notes${inc('/')}`,
        },
        v1: {
            gt: `${DbPrefixes.Master}notes/yore.rs`,
            lt: `${DbPrefixes.Master}notes${inc('/')}`,
        },
    }, {
        Contents: [],
        CommonPrefixes: ['notes/zaphod/'],
@@ -399,12 +276,6 @@ const tests = [

    new Test('all elements v2', {
        v2: true,
    }, {
        v0: {},
        v1: {
            gte: DbPrefixes.Master,
            lt: inc(DbPrefixes.Master),
        },
    }, {
        Contents: receivedData,
        CommonPrefixes: [],
@@ -415,14 +286,6 @@ const tests = [
    new Test('with valid startAfter', {
        startAfter: receivedData[4].key,
        v2: true,
    }, {
        v0: {
            gt: receivedData[4].key,
        },
        v1: {
            gt: `${DbPrefixes.Master}${receivedData[4].key}`,
            lt: inc(DbPrefixes.Master),
        },
    }, {
        Contents: [
            receivedData[5],
@@ -440,14 +303,6 @@ const tests = [
        startAfter: 'zzzz',
        delimiter: '/',
        v2: true,
    }, {
        v0: {
            gt: 'zzzz',
        },
        v1: {
            gt: `${DbPrefixes.Master}zzzz`,
            lt: inc(DbPrefixes.Master),
        },
    }, {
        Contents: [],
        CommonPrefixes: [],
@@ -458,14 +313,6 @@ const tests = [
    new Test('with valid continuationToken', {
        continuationToken: receivedData[4].key,
        v2: true,
    }, {
        v0: {
            gt: receivedData[4].key,
        },
        v1: {
            gt: `${DbPrefixes.Master}${receivedData[4].key}`,
            lt: inc(DbPrefixes.Master),
        },
    }, {
        Contents: [
            receivedData[5],
@@ -483,14 +330,6 @@ const tests = [
        continuationToken: 'zzzz',
        delimiter: '/',
        v2: true,
    }, {
        v0: {
            gt: 'zzzz',
        },
        v1: {
            gt: `${DbPrefixes.Master}zzzz`,
            lt: inc(DbPrefixes.Master),
        },
    }, {
        Contents: [],
        CommonPrefixes: [],
@@ -502,15 +341,6 @@ const tests = [
        delimiter: '/',
        prefix: 'notes/summer/',
        startAfter: 'notes/summer0',
    }, {
        v0: {
            gte: 'notes/summer/',
            lt: `notes/summer${inc('/')}`,
        },
        v1: {
            gte: `${DbPrefixes.Master}notes/summer/`,
            lt: `${DbPrefixes.Master}notes/summer${inc('/')}`,
        },
    }, {
        Contents: [],
        CommonPrefixes: [],
@@ -522,15 +352,6 @@ const tests = [
        delimiter: '/',
        prefix: 'notes/summer/',
        continuationToken: 'notes/summer0',
    }, {
        v0: {
            gte: 'notes/summer/',
            lt: `notes/summer${inc('/')}`,
        },
        v1: {
            gte: `${DbPrefixes.Master}notes/summer/`,
            lt: `${DbPrefixes.Master}notes/summer${inc('/')}`,
        },
    }, {
        Contents: [],
        CommonPrefixes: [],
@@ -543,14 +364,6 @@ const tests = [
        startAfter: 'notes/year.txt',
        maxKeys: 1,
        v2: true,
    }, {
        v0: {
            gt: 'notes/year.txt',
        },
        v1: {
            gt: `${DbPrefixes.Master}notes/year.txt`,
            lt: inc(DbPrefixes.Master),
        },
    }, {
        Contents: [
            receivedData[8],
@@ -567,15 +380,6 @@ const tests = [
        startAfter: 'notes/',
        maxKeys: 1,
        v2: true,
    }, {
        v0: {
            gt: 'notes/',
            lt: `notes${inc('/')}`,
        },
        v1: {
            gt: `${DbPrefixes.Master}notes/`,
            lt: `${DbPrefixes.Master}notes${inc('/')}`,
        },
    }, {
        Contents: [],
        CommonPrefixes: ['notes/spring/'],
@@ -590,15 +394,6 @@ const tests = [
        continuationToken: 'notes/spring/',
        maxKeys: 1,
        v2: true,
    }, {
        v0: {
            gt: 'notes/spring/',
            lt: `notes${inc('/')}`,
        },
        v1: {
            gt: `${DbPrefixes.Master}notes/spring/`,
            lt: `${DbPrefixes.Master}notes${inc('/')}`,
        },
    }, {
        Contents: [],
        CommonPrefixes: ['notes/summer/'],
@@ -613,15 +408,6 @@ const tests = [
        continuationToken: 'notes/summer/',
        maxKeys: 1,
        v2: true,
    }, {
        v0: {
            gt: 'notes/summer/',
            lt: `notes${inc('/')}`,
        },
        v1: {
            gt: `${DbPrefixes.Master}notes/summer/`,
            lt: `${DbPrefixes.Master}notes${inc('/')}`,
        },
    }, {
        Contents: [
            receivedData[7],
@@ -638,15 +424,6 @@ const tests = [
        startAfter: 'notes/year.txt',
        maxKeys: 1,
        v2: true,
    }, {
        v0: {
            gt: 'notes/year.txt',
            lt: `notes${inc('/')}`,
        },
        v1: {
            gt: `${DbPrefixes.Master}notes/year.txt`,
            lt: `${DbPrefixes.Master}notes${inc('/')}`,
        },
    }, {
        Contents: [
            receivedData[8],
@@ -663,15 +440,6 @@ const tests = [
        startAfter: 'notes/yore.rs',
        maxKeys: 1,
        v2: true,
    }, {
        v0: {
            gt: 'notes/yore.rs',
            lt: `notes${inc('/')}`,
        },
        v1: {
            gt: `${DbPrefixes.Master}notes/yore.rs`,
            lt: `${DbPrefixes.Master}notes${inc('/')}`,
        },
    }, {
        Contents: [],
        CommonPrefixes: ['notes/zaphod/'],
@@ -704,107 +472,80 @@ const alphabeticalOrderTests = [
    },
];

function getTestListing(test, data, vFormat) {
    return data
        .filter(e => test.filter(e, test.input))
        .map(obj => {
            if (vFormat === 'v0') {
                return obj;
            }
            if (vFormat === 'v1') {
                return {
                    key: `${DbPrefixes.Master}${obj.key}`,
                    value: obj.value,
                };
            }
            return assert.fail(`bad format ${vFormat}`);
        });
}

['v0', 'v1'].forEach(vFormat => {
    describe(`vFormat=${vFormat} Delimiter listing algorithm`, () => {
        it('Should return good skipping value for DelimiterMaster', () => {
            const delimiter = new DelimiterMaster({ delimiter: '/' });
            for (let i = 0; i < 100; i++) {
                delimiter.filter({
                    key: `${vFormat === 'v1' ? DbPrefixes.Master : ''}foo/${zpad(i)}`,
                    value: '{}',
                });
            }
            assert.strictEqual(delimiter.skipping(),
                `${vFormat === 'v1' ? DbPrefixes.Master : ''}foo/`);
        });

        it('Should set Delimiter alphabeticalOrder field to the expected value', () => {
            alphabeticalOrderTests.forEach(test => {
                const delimiter = new Delimiter(test.params);
                assert.strictEqual(delimiter.alphabeticalOrder,
                    test.expectedValue,
                    `${JSON.stringify(test.params)}`);
            });
        });

        tests.forEach(test => {
            it(`Should return metadata listing params to list ${test.name}`, () => {
                const listing = new Delimiter(test.input, logger, vFormat);
                const params = listing.genMDParams();
                assert.deepStrictEqual(params, test.genMDParams[vFormat]);
            });
            it(`Should list ${test.name}`, () => {
                // Simulate skip scan done by LevelDB
                const d = getTestListing(test, data, vFormat);
                const res = performListing(d, Delimiter, test.input, logger, vFormat);
                assert.deepStrictEqual(res, test.output);
            });
        });

        // Only v0 gets a listing of master and version keys together.
        if (vFormat === 'v0') {
            tests.forEach(test => {
                it(`Should list master versions ${test.name}`, () => {
                    // Simulate skip scan done by LevelDB
                    const d = dataVersioned.filter(e => test.filter(e, test.input));
                    const res = performListing(d, DelimiterMaster, test.input, logger, vFormat);
                    assert.deepStrictEqual(res, test.output);
                });
            });
describe('Delimiter listing algorithm', () => {
    it('Should return good skipping value for DelimiterMaster', done => {
        const delimiter = new DelimiterMaster({ delimiter: '/' });
        for (let i = 0; i < 100; i++) {
            delimiter.filter({ key: `foo/${zpad(i)}`, value: '{}' });
        }
        assert.strictEqual(delimiter.skipping(), 'foo/');
        done();
    });

        it('Should filter values according to alphabeticalOrder parameter', () => {
            let test = new Test('alphabeticalOrder parameter set', {
                delimiter: '/',
                alphabeticalOrder: true,
            }, {
            }, {
                Contents: [
                    receivedNonAlphaData[0],
                ],
                Delimiter: '/',
                CommonPrefixes: [],
                IsTruncated: false,
                NextMarker: undefined,
            });
            let d = getTestListing(test, nonAlphabeticalData, vFormat);
            let res = performListing(d, Delimiter, test.input, logger, vFormat);
            assert.deepStrictEqual(res, test.output);
    it('Should set Delimiter alphabeticalOrder field to the expected value',
    () => {
        alphabeticalOrderTests.forEach(test => {
            const delimiter = new Delimiter(test.params);
            assert.strictEqual(delimiter.alphabeticalOrder,
                test.expectedValue,
                `${JSON.stringify(test.params)}`);
        });
    });

            test = new Test('alphabeticalOrder parameter set', {
                delimiter: '/',
                alphabeticalOrder: false,
            }, {
            }, {
                Contents: [
                    receivedNonAlphaData[0],
                    receivedNonAlphaData[1],
                ],
                Delimiter: '/',
                CommonPrefixes: [],
                IsTruncated: false,
                NextMarker: undefined,
            });
            d = getTestListing(test, nonAlphabeticalData, vFormat);
            res = performListing(d, Delimiter, test.input, logger, vFormat);
    tests.forEach(test => {
        it(`Should list ${test.name}`, done => {
            // Simulate skip scan done by LevelDB
            const d = data.filter(e => test.filter(e, test.input));
            const res = performListing(d, Delimiter, test.input, logger);
            assert.deepStrictEqual(res, test.output);
            done();
        });
    });

    tests.forEach(test => {
        it(`Should list master versions ${test.name}`, done => {
            // Simulate skip scan done by LevelDB
            const d = dataVersioned.filter(e => test.filter(e, test.input));
            const res = performListing(d, DelimiterMaster, test.input, logger);
            assert.deepStrictEqual(res, test.output);
            done();
        });
    });

    it('Should filter values according to alphabeticalOrder parameter',
    () => {
        let test = new Test('alphabeticalOrder parameter set', {
            delimiter: '/',
            alphabeticalOrder: true,
        }, {
            Contents: [
                receivedNonAlphaData[0],
            ],
            Delimiter: '/',
            CommonPrefixes: [],
            IsTruncated: false,
            NextMarker: undefined,
        });
        let d = nonAlphabeticalData.filter(e => test.filter(e, test.input));
        let res = performListing(d, Delimiter, test.input, logger);
        assert.deepStrictEqual(res, test.output);

        test = new Test('alphabeticalOrder parameter set', {
            delimiter: '/',
            alphabeticalOrder: false,
        }, {
            Contents: [
                receivedNonAlphaData[0],
                receivedNonAlphaData[1],
            ],
            Delimiter: '/',
            CommonPrefixes: [],
            IsTruncated: false,
            NextMarker: undefined,
        });
        d = nonAlphabeticalData.filter(e => test.filter(e, test.input));
        res = performListing(d, Delimiter, test.input, logger);
        assert.deepStrictEqual(res, test.output);
    });
});
@ -13,7 +13,6 @@ const VSConst =
|
|||
require('../../../../lib/versioning/constants').VersioningConstants;
|
||||
const Version = require('../../../../lib/versioning/Version').Version;
|
||||
const { generateVersionId } = require('../../../../lib/versioning/VersionID');
|
||||
const { DbPrefixes } = VSConst;
|
||||
|
||||
|
||||
const VID_SEP = VSConst.VersionId.Separator;
|
||||
|
@ -34,426 +33,395 @@ const fakeLogger = {
|
|||
fatal: () => {},
|
||||
};
|
||||
|
||||
function getListingKey(key, vFormat) {
|
||||
if (vFormat === 'v0') {
|
||||
return key;
|
||||
}
|
||||
if (vFormat === 'v1') {
|
||||
return `${DbPrefixes.Master}${key}`;
|
||||
}
|
||||
return assert.fail(`bad vFormat ${vFormat}`);
|
||||
}
describe('Delimiter All masters listing algorithm', () => {
it('should return SKIP_NONE for DelimiterMaster when both NextMarker ' +
'and NextContinuationToken are undefined', () => {
const delimiter = new DelimiterMaster({ delimiter: '/' }, fakeLogger);

['v0', 'v1'].forEach(vFormat => {
describe(`Delimiter All masters listing algorithm vFormat=${vFormat}`, () => {
it('should return SKIP_NONE for DelimiterMaster when both NextMarker ' +
'and NextContinuationToken are undefined', () => {
const delimiter = new DelimiterMaster({ delimiter: '/' }, fakeLogger, vFormat);
assert.strictEqual(delimiter.NextMarker, undefined);

assert.strictEqual(delimiter.NextMarker, undefined);
// When there is no NextMarker or NextContinuationToken, it should
// return SKIP_NONE
assert.strictEqual(delimiter.skipping(), SKIP_NONE);
});

// When there is no NextMarker or NextContinuationToken, it should
// return SKIP_NONE
assert.strictEqual(delimiter.skipping(), SKIP_NONE);
it('should return <key><VersionIdSeparator> for DelimiterMaster when ' +
'NextMarker is set and there is a delimiter', () => {
const key = 'key';
const delimiter = new DelimiterMaster({ delimiter: '/', marker: key },
fakeLogger);

/* Filter a master version to set NextMarker. */
// TODO: useless once S3C-1628 is fixed.
delimiter.filter({ key, value: '' });
assert.strictEqual(delimiter.NextMarker, key);

/* With a delimiter skipping should return previous key + VID_SEP
* (except when a delimiter is set and the NextMarker ends with the
* delimiter). */
assert.strictEqual(delimiter.skipping(), key + VID_SEP);
});
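// Rationale sketch: since version keys are `<master key>${VID_SEP}...`,
// returning `key + VID_SEP` as the skipping value lets the skip scan jump
// past all remaining versions of the key that was just listed (assuming
// the v0 layout exercised by this test).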

it('should return <key><VersionIdSeparator> for DelimiterMaster when ' +
'NextContinuationToken is set and there is a delimiter', () => {
const key = 'key';
const delimiter = new DelimiterMaster(
{ delimiter: '/', startAfter: key, v2: true },
fakeLogger);

// Filter a master version to set NextContinuationToken
delimiter.filter({ key, value: '' });
assert.strictEqual(delimiter.NextContinuationToken, key);

assert.strictEqual(delimiter.skipping(), key + VID_SEP);
});

it('should return NextMarker for DelimiterMaster when NextMarker is set' +
', there is a delimiter and the key ends with the delimiter', () => {
const delimiterChar = '/';
const keyWithEndingDelimiter = `key${delimiterChar}`;
const delimiter = new DelimiterMaster({
delimiter: delimiterChar,
marker: keyWithEndingDelimiter,
}, fakeLogger);

/* When a delimiter is set and the NextMarker ends with the
* delimiter it should return the next marker value. */
assert.strictEqual(delimiter.NextMarker, keyWithEndingDelimiter);
assert.strictEqual(delimiter.skipping(), keyWithEndingDelimiter);
});

it('should skip entries not starting with prefix', () => {
const delimiter = new DelimiterMaster({ prefix: 'prefix' }, fakeLogger);

assert.strictEqual(delimiter.filter({ key: 'wrong' }), FILTER_SKIP);
assert.strictEqual(delimiter.NextMarker, undefined);
assert.strictEqual(delimiter.prvKey, undefined);
assert.deepStrictEqual(delimiter.result(), EmptyResult);
});

it('should skip entries inferior to next marker', () => {
const delimiter = new DelimiterMaster({ marker: 'b' }, fakeLogger);

assert.strictEqual(delimiter.filter({ key: 'a' }), FILTER_SKIP);
assert.strictEqual(delimiter.NextMarker, 'b');
assert.strictEqual(delimiter.prvKey, undefined);
assert.deepStrictEqual(delimiter.result(), EmptyResult);
});

it('should accept a master version', () => {
const delimiter = new DelimiterMaster({}, fakeLogger);
const key = 'key';
const value = '';

assert.strictEqual(delimiter.filter({ key, value }), FILTER_ACCEPT);
assert.strictEqual(delimiter.prvKey, key);
assert.strictEqual(delimiter.NextMarker, key);
assert.deepStrictEqual(delimiter.result(), {
CommonPrefixes: [],
Contents: [{ key, value }],
IsTruncated: false,
NextMarker: undefined,
Delimiter: undefined,
});
});

it('should return <key><VersionIdSeparator> for DelimiterMaster when ' +
'NextMarker is set and there is a delimiter', () => {
const key = 'key';
const delimiter = new DelimiterMaster({ delimiter: '/', marker: key },
fakeLogger, vFormat);
it('should accept a PHD version as first input', () => {
const delimiter = new DelimiterMaster({}, fakeLogger);
const keyPHD = 'keyPHD';
const objPHD = {
key: keyPHD,
value: Version.generatePHDVersion(generateVersionId('', '')),
};

/* Filter a master version to set NextMarker. */
// TODO: useless once S3C-1628 is fixed.
const listingKey = getListingKey(key, vFormat);
delimiter.filter({ key: listingKey, value: '' });
assert.strictEqual(delimiter.NextMarker, key);
/* When filtered, it should return FILTER_ACCEPT and set the prvKey
* to undefined. It should not be added to the result contents or common
* prefixes. */
assert.strictEqual(delimiter.filter(objPHD), FILTER_ACCEPT);
assert.strictEqual(delimiter.prvKey, undefined);
assert.strictEqual(delimiter.NextMarker, undefined);
assert.deepStrictEqual(delimiter.result(), EmptyResult);
});
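// Background note (an inference from the assertions in these tests): a PHD
// value, generated with Version.generatePHDVersion() above, is a
// placeholder master entry whose JSON carries an isPHD flag, e.g.
// '{ "isPHD": true, "value": "version" }' as used further down; the tests
// check that such an entry is accepted without adding anything to the
// listing until a real version follows it.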

/* With a delimiter skipping should return previous key + VID_SEP
* (except when a delimiter is set and the NextMarker ends with the
* delimiter). */
assert.strictEqual(delimiter.skipping(), listingKey + VID_SEP);
it('should accept a PHD version', () => {
const delimiter = new DelimiterMaster({}, fakeLogger);
const key = 'keyA';
const value = '';
const keyPHD = 'keyBPHD';
const objPHD = {
key: keyPHD,
value: Version.generatePHDVersion(generateVersionId('', '')),
};

/* Filter a master version to set the NextMarker, the prvKey and add
* an element to the result contents. */
delimiter.filter({ key, value });

/* When filtered, it should return FILTER_ACCEPT and set the prvKey
* to undefined. It should not be added to the result contents or common
* prefixes. */
assert.strictEqual(delimiter.filter(objPHD), FILTER_ACCEPT);
assert.strictEqual(delimiter.prvKey, undefined);
assert.strictEqual(delimiter.NextMarker, key);
assert.deepStrictEqual(delimiter.result(), {
CommonPrefixes: [],
Contents: [{ key, value }],
IsTruncated: false,
NextMarker: undefined,
Delimiter: undefined,
});
});

it('should return <key><VersionIdSeparator> for DelimiterMaster when ' +
'NextContinuationToken is set and there is a delimiter', () => {
const key = 'key';
const delimiter = new DelimiterMaster(
{ delimiter: '/', startAfter: key, v2: true },
fakeLogger, vFormat);
it('should accept a version after a PHD', () => {
const delimiter = new DelimiterMaster({}, fakeLogger);
const masterKey = 'key';
const keyVersion = `${masterKey}${VID_SEP}version`;
const value = '';
const objPHD = {
key: masterKey,
value: Version.generatePHDVersion(generateVersionId('', '')),
};

// Filter a master version to set NextContinuationToken
const listingKey = getListingKey(key, vFormat);
delimiter.filter({ key: listingKey, value: '' });
assert.strictEqual(delimiter.NextContinuationToken, key);
/* Filter the PHD object. */
delimiter.filter(objPHD);

assert.strictEqual(delimiter.skipping(), listingKey + VID_SEP);
/* The filtering of the PHD object has no impact: the version is
* accepted and added to the result. */
assert.strictEqual(delimiter.filter({
key: keyVersion,
value,
}), FILTER_ACCEPT);
assert.strictEqual(delimiter.prvKey, masterKey);
assert.strictEqual(delimiter.NextMarker, masterKey);
assert.deepStrictEqual(delimiter.result(), {
CommonPrefixes: [],
Contents: [{ key: masterKey, value }],
IsTruncated: false,
NextMarker: undefined,
Delimiter: undefined,
});
});

it('should return NextMarker for DelimiterMaster when NextMarker is set' +
', there is a delimiter and the key ends with the delimiter', () => {
const delimiterChar = '/';
const keyWithEndingDelimiter = `key${delimiterChar}`;
const delimiter = new DelimiterMaster({
delimiter: delimiterChar,
marker: keyWithEndingDelimiter,
}, fakeLogger, vFormat);
it('should accept a delete marker', () => {
const delimiter = new DelimiterMaster({}, fakeLogger);
const version = new Version({ isDeleteMarker: true });
const key = 'key';
const obj = {
key: `${key}${VID_SEP}version`,
value: version.toString(),
};

/* When a delimiter is set and the NextMarker ends with the
* delimiter it should return the next marker value. */
assert.strictEqual(delimiter.NextMarker, keyWithEndingDelimiter);
const skipKey = vFormat === 'v1' ?
`${DbPrefixes.Master}${keyWithEndingDelimiter}` :
keyWithEndingDelimiter;
assert.strictEqual(delimiter.skipping(), skipKey);
/* When filtered, it should return FILTER_SKIP and set the prvKey. It
* should not be added to the result contents or common prefixes. */
assert.strictEqual(delimiter.filter(obj), FILTER_SKIP);
assert.strictEqual(delimiter.NextMarker, undefined);
assert.strictEqual(delimiter.prvKey, key);
assert.deepStrictEqual(delimiter.result(), EmptyResult);
});

it('should skip version after a delete marker', () => {
const delimiter = new DelimiterMaster({}, fakeLogger);
const version = new Version({ isDeleteMarker: true });
const key = 'key';
const versionKey = `${key}${VID_SEP}version`;

delimiter.filter({ key, value: version.toString() });
assert.strictEqual(delimiter.filter({
key: versionKey,
value: 'value',
}), FILTER_SKIP);
assert.strictEqual(delimiter.NextMarker, undefined);
assert.strictEqual(delimiter.prvKey, key);
assert.deepStrictEqual(delimiter.result(), EmptyResult);
});

it('should accept a new key after a delete marker', () => {
const delimiter = new DelimiterMaster({}, fakeLogger);
const version = new Version({ isDeleteMarker: true });
const key1 = 'key1';
const key2 = 'key2';
const value = 'value';

delimiter.filter({ key: key1, value: version.toString() });
assert.strictEqual(delimiter.filter({
key: key2,
value: 'value',
}), FILTER_ACCEPT);
assert.strictEqual(delimiter.NextMarker, key2);
assert.strictEqual(delimiter.prvKey, key2);
assert.deepStrictEqual(delimiter.result(), {
CommonPrefixes: [],
Contents: [{ key: key2, value }],
IsTruncated: false,
NextMarker: undefined,
Delimiter: undefined,
});
});

it('should skip entries not starting with prefix', () => {
const delimiter = new DelimiterMaster({ prefix: 'prefix' }, fakeLogger, vFormat);
it('should accept the master version and skip the other ones', () => {
const delimiter = new DelimiterMaster({}, fakeLogger);
const masterKey = 'key';
const masterValue = 'value';
const versionKey = `${masterKey}${VID_SEP}version`;
const versionValue = 'versionvalue';

const listingKey = getListingKey('wrong', vFormat);
assert.strictEqual(delimiter.filter({ key: listingKey }), FILTER_SKIP);
assert.strictEqual(delimiter.NextMarker, undefined);
assert.strictEqual(delimiter.prvKey, undefined);
assert.deepStrictEqual(delimiter.result(), EmptyResult);
/* Filter the master version. */
delimiter.filter({ key: masterKey, value: masterValue });

/* The version is skipped, not added to the result. The delimiter
* NextMarker and prvKey values are unmodified and set to the
* masterKey. */
assert.strictEqual(delimiter.filter({
key: versionKey,
value: versionValue,
}), FILTER_SKIP);
assert.strictEqual(delimiter.NextMarker, masterKey);
assert.strictEqual(delimiter.prvKey, masterKey);
assert.deepStrictEqual(delimiter.result(), {
CommonPrefixes: [],
Contents: [{ key: masterKey, value: masterValue }],
IsTruncated: false,
NextMarker: undefined,
Delimiter: undefined,
});
});

it('should skip entries inferior to next marker', () => {
const delimiter = new DelimiterMaster({ marker: 'b' }, fakeLogger, vFormat);
it('should return good listing result for version', () => {
const delimiter = new DelimiterMaster({}, fakeLogger);
const masterKey = 'key';
const versionKey1 = `${masterKey}${VID_SEP}version1`;
const versionKey2 = `${masterKey}${VID_SEP}version2`;
const value2 = 'value2';

const listingKey = getListingKey('a', vFormat);
assert.strictEqual(delimiter.filter({ key: listingKey }), FILTER_SKIP);
assert.strictEqual(delimiter.NextMarker, 'b');
assert.strictEqual(delimiter.prvKey, undefined);
assert.deepStrictEqual(delimiter.result(), EmptyResult);
/* Filter the PHD version. */
assert.strictEqual(delimiter.filter({
key: masterKey,
value: '{ "isPHD": true, "value": "version" }',
}), FILTER_ACCEPT);

/* Filter a delete marker version. */
assert.strictEqual(delimiter.filter({
key: versionKey1,
value: '{ "isDeleteMarker": true }',
}), FILTER_ACCEPT);

/* Filter a last version with a specific value. */
assert.strictEqual(delimiter.filter({
key: versionKey2,
value: value2,
}), FILTER_ACCEPT);

assert.deepStrictEqual(delimiter.result(), {
CommonPrefixes: [],
Contents: [{ key: masterKey, value: value2 }],
IsTruncated: false,
NextMarker: undefined,
Delimiter: undefined,
});
});

it('should accept a master version', () => {
const delimiter = new DelimiterMaster({}, fakeLogger, vFormat);
const key = 'key';
const value = '';
it('should return good values for entries with different common prefixes',
() => {
const delimiterChar = '/';
const commonPrefix1 = `commonPrefix1${delimiterChar}`;
const commonPrefix2 = `commonPrefix2${delimiterChar}`;
const prefix1Key1 = `${commonPrefix1}key1`;
const prefix1Key2 = `${commonPrefix1}key2`;
const prefix2Key1 = `${commonPrefix2}key1`;
const value = 'value';

const listingKey = getListingKey(key, vFormat);
assert.strictEqual(delimiter.filter({ key: listingKey, value }), FILTER_ACCEPT);
if (vFormat === 'v0') {
assert.strictEqual(delimiter.prvKey, key);
}
assert.strictEqual(delimiter.NextMarker, key);
assert.deepStrictEqual(delimiter.result(), {
CommonPrefixes: [],
Contents: [{ key, value }],
IsTruncated: false,
NextMarker: undefined,
Delimiter: undefined,
});
const delimiter = new DelimiterMaster({ delimiter: delimiterChar },
fakeLogger);

/* Filter the first entry with a common prefix. It should be
* accepted and added to the result. */
assert.strictEqual(delimiter.filter({ key: prefix1Key1, value }),
FILTER_ACCEPT);
assert.deepStrictEqual(delimiter.result(), {
CommonPrefixes: [commonPrefix1],
Contents: [],
IsTruncated: false,
NextMarker: undefined,
Delimiter: delimiterChar,
});

/* Filter the second entry with the same common prefix as the
* first entry. It should be skipped and not added to the result. */
assert.strictEqual(delimiter.filter({ key: prefix1Key2, value }),
FILTER_SKIP);
assert.deepStrictEqual(delimiter.result(), {
CommonPrefixes: [commonPrefix1],
Contents: [],
IsTruncated: false,
NextMarker: undefined,
Delimiter: delimiterChar,
});

/* Filter an entry with a new common prefix. It should be accepted
* and not added to the result. */
assert.strictEqual(delimiter.filter({ key: prefix2Key1, value }),
FILTER_ACCEPT);
assert.deepStrictEqual(delimiter.result(), {
CommonPrefixes: [commonPrefix1, commonPrefix2],
Contents: [],
IsTruncated: false,
NextMarker: undefined,
Delimiter: delimiterChar,
});
});

/* Here we test the internal management of the prvKey field of the
* DelimiterMaster class, in particular once it has been set to an entry
* key before that entry is finally skipped because of an already present
* common prefix. */
it('should accept a version after skipping an object because of its ' +
'commonPrefix', () => {
const delimiterChar = '/';
const commonPrefix1 = `commonPrefix1${delimiterChar}`;
const commonPrefix2 = `commonPrefix2${delimiterChar}`;
const prefix1Key1 = `${commonPrefix1}key1`;
const prefix1Key2 = `${commonPrefix1}key2`;
const prefix2VersionKey1 = `${commonPrefix2}key1${VID_SEP}version`;
const value = 'value';

const delimiter = new DelimiterMaster({ delimiter: delimiterChar },
fakeLogger);

/* Filter the first two entries with the same common prefix to add
* it to the result and reach the state where an entry is skipped
* because of an already present common prefix in the result. */
delimiter.filter({ key: prefix1Key1, value });
delimiter.filter({ key: prefix1Key2, value });

/* Filter an object with a key containing a version part and a new
* common prefix. It should be accepted and the new common prefix
* added to the result. */
assert.strictEqual(delimiter.filter({
key: prefix2VersionKey1,
value,
}), FILTER_ACCEPT);
assert.deepStrictEqual(delimiter.result(), {
CommonPrefixes: [commonPrefix1, commonPrefix2],
Contents: [],
IsTruncated: false,
NextMarker: undefined,
Delimiter: delimiterChar,
});
});

it('should return good values for entries with different common prefixes', () => {
const delimiterChar = '/';
const commonPrefix1 = `commonPrefix1${delimiterChar}`;
const commonPrefix2 = `commonPrefix2${delimiterChar}`;
const prefix1Key1 = `${commonPrefix1}key1`;
const prefix1Key2 = `${commonPrefix1}key2`;
const prefix2Key1 = `${commonPrefix2}key1`;
const value = 'value';
it('should skip a versioned entry when there is a delimiter and the key ' +
'starts with the NextMarker value', () => {
const delimiterChar = '/';
const commonPrefix = `commonPrefix${delimiterChar}`;
const key = `${commonPrefix}key${VID_SEP}version`;
const value = 'value';

const delimiter = new DelimiterMaster({ delimiter: delimiterChar },
fakeLogger, vFormat);
const delimiter = new DelimiterMaster({ delimiter: delimiterChar },
fakeLogger);
/* TODO: should be set to a whole key instead of just a common prefix
* once ZENKO-1048 is fixed. */
delimiter.NextMarker = commonPrefix;

/* Filter the first entry with a common prefix. It should be
* accepted and added to the result. */
assert.strictEqual(delimiter.filter({
key: getListingKey(prefix1Key1, vFormat),
value,
}),
FILTER_ACCEPT);
assert.deepStrictEqual(delimiter.result(), {
CommonPrefixes: [commonPrefix1],
Contents: [],
IsTruncated: false,
NextMarker: undefined,
Delimiter: delimiterChar,
});

/* Filter the second entry with the same common prefix as the
* first entry. It should be skipped and not added to the result. */
assert.strictEqual(delimiter.filter({
key: getListingKey(prefix1Key2, vFormat),
value,
}),
FILTER_SKIP);
assert.deepStrictEqual(delimiter.result(), {
CommonPrefixes: [commonPrefix1],
Contents: [],
IsTruncated: false,
NextMarker: undefined,
Delimiter: delimiterChar,
});

/* Filter an entry with a new common prefix. It should be accepted
* and not added to the result. */
assert.strictEqual(delimiter.filter({
key: getListingKey(prefix2Key1, vFormat),
value,
}),
FILTER_ACCEPT);
assert.deepStrictEqual(delimiter.result(), {
CommonPrefixes: [commonPrefix1, commonPrefix2],
Contents: [],
IsTruncated: false,
NextMarker: undefined,
Delimiter: delimiterChar,
});
});

if (vFormat === 'v0') {
it('should accept a PHD version as first input', () => {
const delimiter = new DelimiterMaster({}, fakeLogger, vFormat);
const keyPHD = 'keyPHD';
const objPHD = {
key: keyPHD,
value: Version.generatePHDVersion(generateVersionId('', '')),
};

/* When filtered, it should return FILTER_ACCEPT and set the prvKey
* to undefined. It should not be added to the result contents or common
* prefixes. */
assert.strictEqual(delimiter.filter(objPHD), FILTER_ACCEPT);
assert.strictEqual(delimiter.prvKey, undefined);
assert.strictEqual(delimiter.NextMarker, undefined);
assert.deepStrictEqual(delimiter.result(), EmptyResult);
});

it('should accept a PHD version', () => {
const delimiter = new DelimiterMaster({}, fakeLogger, vFormat);
const key = 'keyA';
const value = '';
const keyPHD = 'keyBPHD';
const objPHD = {
key: keyPHD,
value: Version.generatePHDVersion(generateVersionId('', '')),
};

/* Filter a master version to set the NextMarker, the prvKey and add
* an element to the result contents. */
delimiter.filter({ key, value });

/* When filtered, it should return FILTER_ACCEPT and set the prvKey
* to undefined. It should not be added to the result contents or common
* prefixes. */
assert.strictEqual(delimiter.filter(objPHD), FILTER_ACCEPT);
assert.strictEqual(delimiter.prvKey, undefined);
assert.strictEqual(delimiter.NextMarker, key);
assert.deepStrictEqual(delimiter.result(), {
CommonPrefixes: [],
Contents: [{ key, value }],
IsTruncated: false,
NextMarker: undefined,
Delimiter: undefined,
});
});

it('should accept a version after a PHD', () => {
const delimiter = new DelimiterMaster({}, fakeLogger, vFormat);
const masterKey = 'key';
const keyVersion = `${masterKey}${VID_SEP}version`;
const value = '';
const objPHD = {
key: masterKey,
value: Version.generatePHDVersion(generateVersionId('', '')),
};

/* Filter the PHD object. */
delimiter.filter(objPHD);

/* The filtering of the PHD object has no impact: the version is
* accepted and added to the result. */
assert.strictEqual(delimiter.filter({
key: keyVersion,
value,
}), FILTER_ACCEPT);
assert.strictEqual(delimiter.prvKey, masterKey);
assert.strictEqual(delimiter.NextMarker, masterKey);
assert.deepStrictEqual(delimiter.result(), {
CommonPrefixes: [],
Contents: [{ key: masterKey, value }],
IsTruncated: false,
NextMarker: undefined,
Delimiter: undefined,
});
});

it('should accept a delete marker', () => {
const delimiter = new DelimiterMaster({}, fakeLogger, vFormat);
const version = new Version({ isDeleteMarker: true });
const key = 'key';
const obj = {
key: `${key}${VID_SEP}version`,
value: version.toString(),
};

/* When filtered, it should return FILTER_SKIP and set the prvKey. It
* should not be added to the result contents or common prefixes. */
assert.strictEqual(delimiter.filter(obj), FILTER_SKIP);
assert.strictEqual(delimiter.NextMarker, undefined);
assert.strictEqual(delimiter.prvKey, key);
assert.deepStrictEqual(delimiter.result(), EmptyResult);
});

it('should skip version after a delete marker', () => {
const delimiter = new DelimiterMaster({}, fakeLogger, vFormat);
const version = new Version({ isDeleteMarker: true });
const key = 'key';
const versionKey = `${key}${VID_SEP}version`;

delimiter.filter({ key, value: version.toString() });
assert.strictEqual(delimiter.filter({
key: versionKey,
value: 'value',
}), FILTER_SKIP);
assert.strictEqual(delimiter.NextMarker, undefined);
assert.strictEqual(delimiter.prvKey, key);
assert.deepStrictEqual(delimiter.result(), EmptyResult);
});

it('should accept a new key after a delete marker', () => {
const delimiter = new DelimiterMaster({}, fakeLogger, vFormat);
const version = new Version({ isDeleteMarker: true });
const key1 = 'key1';
const key2 = 'key2';
const value = 'value';

delimiter.filter({ key: key1, value: version.toString() });
assert.strictEqual(delimiter.filter({
key: key2,
value: 'value',
}), FILTER_ACCEPT);
assert.strictEqual(delimiter.NextMarker, key2);
assert.strictEqual(delimiter.prvKey, key2);
assert.deepStrictEqual(delimiter.result(), {
CommonPrefixes: [],
Contents: [{ key: key2, value }],
IsTruncated: false,
NextMarker: undefined,
Delimiter: undefined,
});
});

it('should accept the master version and skip the other ones', () => {
const delimiter = new DelimiterMaster({}, fakeLogger, vFormat);
const masterKey = 'key';
const masterValue = 'value';
const versionKey = `${masterKey}${VID_SEP}version`;
const versionValue = 'versionvalue';

/* Filter the master version. */
delimiter.filter({ key: masterKey, value: masterValue });

/* The version is skipped, not added to the result. The delimiter
* NextMarker and prvKey values are unmodified and set to the
* masterKey. */
assert.strictEqual(delimiter.filter({
key: versionKey,
value: versionValue,
}), FILTER_SKIP);
assert.strictEqual(delimiter.NextMarker, masterKey);
assert.strictEqual(delimiter.prvKey, masterKey);
assert.deepStrictEqual(delimiter.result(), {
CommonPrefixes: [],
Contents: [{ key: masterKey, value: masterValue }],
IsTruncated: false,
NextMarker: undefined,
Delimiter: undefined,
});
});

it('should return good listing result for version', () => {
const delimiter = new DelimiterMaster({}, fakeLogger, vFormat);
const masterKey = 'key';
const versionKey1 = `${masterKey}${VID_SEP}version1`;
const versionKey2 = `${masterKey}${VID_SEP}version2`;
const value2 = 'value2';

/* Filter the PHD version. */
assert.strictEqual(delimiter.filter({
key: masterKey,
value: '{ "isPHD": true, "value": "version" }',
}), FILTER_ACCEPT);

/* Filter a delete marker version. */
assert.strictEqual(delimiter.filter({
key: versionKey1,
value: '{ "isDeleteMarker": true }',
}), FILTER_ACCEPT);

/* Filter a last version with a specific value. */
assert.strictEqual(delimiter.filter({
key: versionKey2,
value: value2,
}), FILTER_ACCEPT);

assert.deepStrictEqual(delimiter.result(), {
CommonPrefixes: [],
Contents: [{ key: masterKey, value: value2 }],
IsTruncated: false,
NextMarker: undefined,
Delimiter: undefined,
});
});

/* Here we test the internal management of the prvKey field of the
* DelimiterMaster class, in particular once it has been set to an entry
* key before that entry is finally skipped because of an already present
* common prefix. */
it('should accept a version after skipping an object because of its commonPrefix', () => {
const delimiterChar = '/';
const commonPrefix1 = `commonPrefix1${delimiterChar}`;
const commonPrefix2 = `commonPrefix2${delimiterChar}`;
const prefix1Key1 = `${commonPrefix1}key1`;
const prefix1Key2 = `${commonPrefix1}key2`;
const prefix2VersionKey1 = `${commonPrefix2}key1${VID_SEP}version`;
const value = 'value';

const delimiter = new DelimiterMaster({ delimiter: delimiterChar },
fakeLogger, vFormat);

/* Filter the first two entries with the same common prefix to add
* it to the result and reach the state where an entry is skipped
* because of an already present common prefix in the result. */
delimiter.filter({ key: prefix1Key1, value });
delimiter.filter({ key: prefix1Key2, value });

/* Filter an object with a key containing a version part and a new
* common prefix. It should be accepted and the new common prefix
* added to the result. */
assert.strictEqual(delimiter.filter({
key: prefix2VersionKey1,
value,
}), FILTER_ACCEPT);
assert.deepStrictEqual(delimiter.result(), {
CommonPrefixes: [commonPrefix1, commonPrefix2],
Contents: [],
IsTruncated: false,
NextMarker: undefined,
Delimiter: delimiterChar,
});
});

it('should skip a versioned entry when there is a delimiter and the key ' +
'starts with the NextMarker value', () => {
const delimiterChar = '/';
const commonPrefix = `commonPrefix${delimiterChar}`;
const key = `${commonPrefix}key${VID_SEP}version`;
const value = 'value';

const delimiter = new DelimiterMaster({ delimiter: delimiterChar },
fakeLogger, vFormat);
/* TODO: should be set to a whole key instead of just a common prefix
* once ZENKO-1048 is fixed. */
delimiter.NextMarker = commonPrefix;

assert.strictEqual(delimiter.filter({ key, value }), FILTER_SKIP);
});
}
assert.strictEqual(delimiter.filter({ key, value }), FILTER_SKIP);
});
});
@ -7,16 +7,11 @@ const Werelogs = require('werelogs').Logger;
const logger = new Werelogs('listTest');
const performListing = require('../../../utils/performListing');
const zpad = require('../../helpers').zpad;
const { inc } = require('../../../../lib/algos/list/tools');
const VSConst = require('../../../../lib/versioning/constants').VersioningConstants;
const { DbPrefixes } = VSConst;
const VID_SEP = VSConst.VersionId.Separator;

class Test {
constructor(name, input, genMDParams, output, filter) {
constructor(name, input, output, filter) {
this.name = name;
this.input = input;
this.genMDParams = genMDParams;
this.output = output;
this.filter = filter || this._defaultFilter;
}
@ -32,75 +27,40 @@ const bar = '{"versionId":"bar"}';
|
|||
const qux = '{"versionId":"qux"}';
|
||||
const valuePHD = '{"isPHD":"true","versionId":"1234567890abcdefg"}';
|
||||
const valueDeleteMarker = '{"hello":"world","isDeleteMarker":"true"}';
|
||||
const dataVersioned = {
|
||||
v0: [
|
||||
{ key: 'Pâtisserie=中文-español-English', value: bar },
|
||||
{ key: `Pâtisserie=中文-español-English${VID_SEP}bar`, value: bar },
|
||||
{ key: `Pâtisserie=中文-español-English${VID_SEP}foo`, value: foo },
|
||||
{ key: 'notes/spring/1.txt', value: bar },
|
||||
{ key: `notes/spring/1.txt${VID_SEP}bar`, value: bar },
|
||||
{ key: `notes/spring/1.txt${VID_SEP}foo`, value: foo },
|
||||
{ key: `notes/spring/1.txt${VID_SEP}qux`, value: qux },
|
||||
{ key: 'notes/spring/2.txt', value: valuePHD },
|
||||
{ key: `notes/spring/2.txt${VID_SEP}bar`, value: valueDeleteMarker },
|
||||
{ key: `notes/spring/2.txt${VID_SEP}foo`, value: foo },
|
||||
{ key: 'notes/spring/march/1.txt',
|
||||
value: '{"versionId":"null","isNull":true}' },
|
||||
{ key: `notes/spring/march/1.txt${VID_SEP}bar`, value: bar },
|
||||
{ key: `notes/spring/march/1.txt${VID_SEP}foo`, value: foo },
|
||||
{ key: 'notes/summer/1.txt', value: bar },
|
||||
{ key: `notes/summer/1.txt${VID_SEP}bar`, value: bar },
|
||||
{ key: `notes/summer/1.txt${VID_SEP}foo`, value: foo },
|
||||
{ key: 'notes/summer/2.txt', value: bar },
|
||||
{ key: `notes/summer/2.txt${VID_SEP}bar`, value: bar },
|
||||
{ key: 'notes/summer/4.txt', value: valuePHD },
|
||||
{ key: `notes/summer/4.txt${VID_SEP}bar`, value: valueDeleteMarker },
|
||||
{ key: `notes/summer/4.txt${VID_SEP}foo`, value: valueDeleteMarker },
|
||||
{ key: `notes/summer/4.txt${VID_SEP}qux`, value: valueDeleteMarker },
|
||||
{ key: 'notes/summer/44.txt', value: valuePHD },
|
||||
{ key: 'notes/summer/444.txt', value: valueDeleteMarker },
|
||||
{ key: 'notes/summer/4444.txt', value: valuePHD },
|
||||
{ key: 'notes/summer/44444.txt', value: valueDeleteMarker },
|
||||
{ key: 'notes/summer/444444.txt', value: valuePHD },
|
||||
{ key: 'notes/summer/august/1.txt', value },
|
||||
{ key: 'notes/year.txt', value },
|
||||
{ key: 'notes/yore.rs', value },
|
||||
{ key: 'notes/zaphod/Beeblebrox.txt', value },
|
||||
],
|
||||
v1: [ // we add M and V prefixes in getTestListing() due to the
|
||||
// test cases needing the original key to filter
|
||||
{ key: 'Pâtisserie=中文-español-English', value: bar },
|
||||
{ key: `Pâtisserie=中文-español-English${VID_SEP}bar`, value: bar },
|
||||
{ key: `Pâtisserie=中文-español-English${VID_SEP}foo`, value: foo },
|
||||
{ key: 'notes/spring/1.txt', value: bar },
|
||||
{ key: `notes/spring/1.txt${VID_SEP}bar`, value: bar },
|
||||
{ key: `notes/spring/1.txt${VID_SEP}foo`, value: foo },
|
||||
{ key: `notes/spring/1.txt${VID_SEP}qux`, value: qux },
|
||||
{ key: `notes/spring/2.txt${VID_SEP}bar`, value: valueDeleteMarker },
|
||||
{ key: `notes/spring/2.txt${VID_SEP}foo`, value: foo },
|
||||
{ key: 'notes/spring/march/1.txt',
|
||||
value: '{"versionId":"null","isNull":true}' },
|
||||
{ key: `notes/spring/march/1.txt${VID_SEP}bar`, value: bar },
|
||||
{ key: `notes/spring/march/1.txt${VID_SEP}foo`, value: foo },
|
||||
{ key: 'notes/summer/1.txt', value: bar },
|
||||
{ key: `notes/summer/1.txt${VID_SEP}bar`, value: bar },
|
||||
{ key: `notes/summer/1.txt${VID_SEP}foo`, value: foo },
|
||||
{ key: 'notes/summer/2.txt', value: bar },
|
||||
{ key: `notes/summer/2.txt${VID_SEP}bar`, value: bar },
|
||||
{ key: `notes/summer/4.txt${VID_SEP}bar`, value: valueDeleteMarker },
|
||||
{ key: `notes/summer/4.txt${VID_SEP}foo`, value: valueDeleteMarker },
|
||||
{ key: `notes/summer/4.txt${VID_SEP}qux`, value: valueDeleteMarker },
|
||||
// Compared to v0, the two following keys are version keys
|
||||
// that we give a version ID, because delete markers do not
|
||||
// have a master key in v1.
|
||||
{ key: `notes/summer/444.txt${VID_SEP}null`, value: valueDeleteMarker },
|
||||
{ key: `notes/summer/44444.txt${VID_SEP}null`, value: valueDeleteMarker },
|
||||
{ key: 'notes/summer/august/1.txt', value },
|
||||
{ key: 'notes/year.txt', value },
|
||||
{ key: 'notes/yore.rs', value },
|
||||
{ key: 'notes/zaphod/Beeblebrox.txt', value },
|
||||
],
|
||||
};
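// The v0 and v1 fixtures above describe the same logical bucket in the two
// key layouts: v0 keeps master and version keys in one namespace separated
// by VID_SEP, while v1 splits them under the DbPrefixes.Master and
// DbPrefixes.Version prefixes (prepended in getTestListing() below), with
// no master key at all for delete markers.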
const dataVersioned = [
{ key: 'Pâtisserie=中文-español-English', value: bar },
{ key: 'Pâtisserie=中文-español-English\0bar', value: bar },
{ key: 'Pâtisserie=中文-español-English\0foo', value: foo },
{ key: 'notes/spring/1.txt', value: bar },
{ key: 'notes/spring/1.txt\0bar', value: bar },
{ key: 'notes/spring/1.txt\0foo', value: foo },
{ key: 'notes/spring/1.txt\0qux', value: qux },
{ key: 'notes/spring/2.txt', value: valuePHD },
{ key: 'notes/spring/2.txt\0bar', value: valueDeleteMarker },
{ key: 'notes/spring/2.txt\0foo', value: foo },
{ key: 'notes/spring/march/1.txt',
value: '{"versionId":"null","isNull":true}' },
{ key: 'notes/spring/march/1.txt\0bar', value: bar },
{ key: 'notes/spring/march/1.txt\0foo', value: foo },
{ key: 'notes/summer/1.txt', value: bar },
{ key: 'notes/summer/1.txt\0bar', value: bar },
{ key: 'notes/summer/1.txt\0foo', value: foo },
{ key: 'notes/summer/2.txt', value: bar },
{ key: 'notes/summer/2.txt\0bar', value: bar },
{ key: 'notes/summer/4.txt', value: valuePHD },
{ key: 'notes/summer/4.txt\0bar', value: valueDeleteMarker },
{ key: 'notes/summer/4.txt\0foo', value: valueDeleteMarker },
{ key: 'notes/summer/4.txt\0qux', value: valueDeleteMarker },
{ key: 'notes/summer/44.txt', value: valuePHD },
{ key: 'notes/summer/444.txt', value: valueDeleteMarker },
{ key: 'notes/summer/4444.txt', value: valuePHD },
{ key: 'notes/summer/44444.txt', value: valueDeleteMarker },
{ key: 'notes/summer/444444.txt', value: valuePHD },
{ key: 'notes/summer/august/1.txt', value },
{ key: 'notes/year.txt', value },
{ key: 'notes/yore.rs', value },
{ key: 'notes/zaphod/Beeblebrox.txt', value },
];
const receivedData = [
{ key: 'Pâtisserie=中文-español-English', value: bar, versionId: 'bar' },
{ key: 'Pâtisserie=中文-español-English', value: foo, versionId: 'foo' },
@ -130,10 +90,6 @@ const receivedData = [
];
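// Each Test below now carries per-format metadata listing params: a single
// key range for v0, and a pair of ranges for v1, one over the master-key
// prefix and one over the version-key prefix, for instance:
// [{ gte: DbPrefixes.Master, lt: inc(DbPrefixes.Master) },
//  { gte: DbPrefixes.Version, lt: inc(DbPrefixes.Version) }]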
const tests = [
new Test('all versions', {}, {
v0: {},
v1: [{ gte: DbPrefixes.Master, lt: inc(DbPrefixes.Master) },
{ gte: DbPrefixes.Version, lt: inc(DbPrefixes.Version) }],
}, {
Versions: receivedData,
CommonPrefixes: [],
Delimiter: undefined,
@ -143,17 +99,6 @@ const tests = [
}),
new Test('with valid key marker', {
keyMarker: receivedData[3].key,
}, {
v0: {
gt: `${receivedData[3].key}\u0001`,
},
v1: [{
gt: `${DbPrefixes.Master}${receivedData[3].key}${inc(VID_SEP)}`,
lt: inc(DbPrefixes.Master),
}, {
gt: `${DbPrefixes.Version}${receivedData[3].key}${inc(VID_SEP)}`,
lt: inc(DbPrefixes.Version),
}],
}, {
Versions: receivedData.slice(5),
CommonPrefixes: [],
@ -165,17 +110,6 @@ const tests = [
new Test('with bad key marker', {
keyMarker: 'zzzz',
delimiter: '/',
}, {
v0: {
gt: `zzzz${inc(VID_SEP)}`,
},
v1: [{
gt: `${DbPrefixes.Master}zzzz${inc(VID_SEP)}`,
lt: inc(DbPrefixes.Master),
}, {
gt: `${DbPrefixes.Version}zzzz${inc(VID_SEP)}`,
lt: inc(DbPrefixes.Version),
}],
}, {
Versions: [],
CommonPrefixes: [],
@ -186,15 +120,6 @@ const tests = [
}, (e, input) => e.key > input.keyMarker),
new Test('with maxKeys', {
maxKeys: 3,
}, {
v0: {},
v1: [{
gte: DbPrefixes.Master,
lt: inc(DbPrefixes.Master),
}, {
gte: DbPrefixes.Version,
lt: inc(DbPrefixes.Version),
}],
}, {
Versions: receivedData.slice(0, 3),
CommonPrefixes: [],
@ -205,15 +130,6 @@ const tests = [
}),
new Test('with big maxKeys', {
maxKeys: 15000,
}, {
v0: {},
v1: [{
gte: DbPrefixes.Master,
lt: inc(DbPrefixes.Master),
}, {
gte: DbPrefixes.Version,
lt: inc(DbPrefixes.Version),
}],
}, {
Versions: receivedData,
CommonPrefixes: [],
@ -224,15 +140,6 @@ const tests = [
}),
new Test('with delimiter', {
delimiter: '/',
}, {
v0: {},
v1: [{
gte: DbPrefixes.Master,
lt: inc(DbPrefixes.Master),
}, {
gte: DbPrefixes.Version,
lt: inc(DbPrefixes.Version),
}],
}, {
Versions: [
receivedData[0],
@ -246,15 +153,6 @@ const tests = [
}),
new Test('with long delimiter', {
delimiter: 'notes/summer',
}, {
v0: {},
v1: [{
gte: DbPrefixes.Master,
lt: inc(DbPrefixes.Master),
}, {
gte: DbPrefixes.Version,
lt: inc(DbPrefixes.Version),
}],
}, {
Versions: receivedData.filter(entry =>
entry.key.indexOf('notes/summer') < 0),
@ -268,18 +166,6 @@ const tests = [
delimiter: '/',
prefix: 'notes/summer/',
keyMarker: 'notes/summer0',
}, {
v0: {
gt: `notes/summer0${inc(VID_SEP)}`,
lt: `notes/summer${inc('/')}`,
},
v1: [{
gt: `${DbPrefixes.Master}notes/summer0${inc(VID_SEP)}`,
lt: `${DbPrefixes.Master}notes/summer${inc('/')}`,
}, {
gt: `${DbPrefixes.Version}notes/summer0${inc(VID_SEP)}`,
lt: `${DbPrefixes.Version}notes/summer${inc('/')}`,
}],
}, {
Versions: [],
CommonPrefixes: [],
@ -291,18 +177,6 @@ const tests = [
new Test('delimiter and prefix (related to #147)', {
delimiter: '/',
prefix: 'notes/',
}, {
v0: {
gte: 'notes/',
lt: `notes${inc('/')}`,
},
v1: [{
gte: `${DbPrefixes.Master}notes/`,
lt: `${DbPrefixes.Master}notes${inc('/')}`,
}, {
gte: `${DbPrefixes.Version}notes/`,
lt: `${DbPrefixes.Version}notes${inc('/')}`,
}],
}, {
Versions: [
receivedData[19],
@ -322,18 +196,6 @@ const tests = [
delimiter: '/',
prefix: 'notes/',
keyMarker: 'notes/year.txt',
}, {
v0: {
gt: `notes/year.txt${inc(VID_SEP)}`,
lt: `notes${inc('/')}`,
},
v1: [{
gt: `${DbPrefixes.Master}notes/year.txt${inc(VID_SEP)}`,
lt: `${DbPrefixes.Master}notes${inc('/')}`,
}, {
gt: `${DbPrefixes.Version}notes/year.txt${inc(VID_SEP)}`,
lt: `${DbPrefixes.Version}notes${inc('/')}`,
}],
}, {
Versions: [
receivedData[20],
@ -351,18 +213,6 @@ const tests = [
prefix: 'notes/',
keyMarker: 'notes/',
maxKeys: 1,
}, {
v0: {
gt: `notes/${inc(VID_SEP)}`,
lt: `notes${inc('/')}`,
},
v1: [{
gt: `${DbPrefixes.Master}notes/${inc(VID_SEP)}`,
lt: `${DbPrefixes.Master}notes${inc('/')}`,
}, {
gt: `${DbPrefixes.Version}notes/${inc(VID_SEP)}`,
lt: `${DbPrefixes.Version}notes${inc('/')}`,
}],
}, {
Versions: [],
CommonPrefixes: ['notes/spring/'],
@ -377,18 +227,6 @@ const tests = [
prefix: 'notes/', // prefix
keyMarker: 'notes/spring/',
maxKeys: 1,
}, {
v0: {
gt: `notes/spring/${inc(VID_SEP)}`,
lt: `notes${inc('/')}`,
},
v1: [{
gt: `${DbPrefixes.Master}notes/spring/${inc(VID_SEP)}`,
lt: `${DbPrefixes.Master}notes${inc('/')}`,
}, {
gt: `${DbPrefixes.Version}notes/spring/${inc(VID_SEP)}`,
lt: `${DbPrefixes.Version}notes${inc('/')}`,
}],
}, {
Versions: [],
CommonPrefixes: ['notes/summer/'],
@ -403,18 +241,6 @@ const tests = [
prefix: 'notes/', // prefix
keyMarker: 'notes/summer/',
maxKeys: 1,
}, {
v0: {
gt: `notes/summer/${inc(VID_SEP)}`,
lt: `notes${inc('/')}`,
},
v1: [{
gt: `${DbPrefixes.Master}notes/summer/${inc(VID_SEP)}`,
lt: `${DbPrefixes.Master}notes${inc('/')}`,
}, {
gt: `${DbPrefixes.Version}notes/summer/${inc(VID_SEP)}`,
lt: `${DbPrefixes.Version}notes${inc('/')}`,
}],
}, {
Versions: [
receivedData[19],
@ -431,18 +257,6 @@ const tests = [
prefix: 'notes/', // prefix
keyMarker: 'notes/year.txt',
maxKeys: 1,
}, {
v0: {
gt: `notes/year.txt${inc(VID_SEP)}`,
lt: `notes${inc('/')}`,
},
v1: [{
gt: `${DbPrefixes.Master}notes/year.txt${inc(VID_SEP)}`,
lt: `${DbPrefixes.Master}notes${inc('/')}`,
}, {
gt: `${DbPrefixes.Version}notes/year.txt${inc(VID_SEP)}`,
lt: `${DbPrefixes.Version}notes${inc('/')}`,
}],
}, {
Versions: [
receivedData[20],
@ -459,18 +273,6 @@ const tests = [
prefix: 'notes/',
keyMarker: 'notes/yore.rs',
maxKeys: 1,
}, {
v0: {
gt: `notes/yore.rs${inc(VID_SEP)}`,
lt: `notes${inc('/')}`,
},
v1: [{
gt: `${DbPrefixes.Master}notes/yore.rs${inc(VID_SEP)}`,
lt: `${DbPrefixes.Master}notes${inc('/')}`,
}, {
gt: `${DbPrefixes.Version}notes/yore.rs${inc(VID_SEP)}`,
lt: `${DbPrefixes.Version}notes${inc('/')}`,
}],
}, {
Versions: [],
CommonPrefixes: ['notes/zaphod/'],
@ -481,51 +283,24 @@ const tests = [
}, (e, input) => e.key > input.keyMarker),
];

function getTestListing(test, data, vFormat) {
return data
.filter(e => test.filter(e, test.input))
.map(e => {
if (vFormat === 'v0') {
return e;
}
if (vFormat === 'v1') {
const keyPrefix = e.key.includes(VID_SEP) ?
DbPrefixes.Version : DbPrefixes.Master;
return {
key: `${keyPrefix}${e.key}`,
value: e.value,
};
}
return assert.fail(`bad format ${vFormat}`);
});
}
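// Illustrative use: getTestListing(tests[0], dataVersioned.v1, 'v1') keeps
// the entries passing the test filter and prepends DbPrefixes.Version to
// keys containing VID_SEP and DbPrefixes.Master to the others.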
describe('Delimiter All Versions listing algorithm', () => {
it('Should return good skipping value for DelimiterVersions', done => {
const delimiter = new DelimiterVersions({ delimiter: '/' });
for (let i = 0; i < 100; i++) {
delimiter.filter({ key: `foo/${zpad(i)}`, value: '{}' });
}
assert.strictEqual(delimiter.skipping(), 'foo/');
done();
});

['v0', 'v1'].forEach(vFormat => {
describe(`Delimiter All Versions listing algorithm vFormat=${vFormat}`, () => {
it('Should return good skipping value for DelimiterVersions', () => {
const delimiter = new DelimiterVersions({ delimiter: '/' });
for (let i = 0; i < 100; i++) {
delimiter.filter({
key: `${vFormat === 'v1' ? DbPrefixes.Master : ''}foo/${zpad(i)}`,
value: '{}',
});
}
assert.strictEqual(delimiter.skipping(),
`${vFormat === 'v1' ? DbPrefixes.Master : ''}foo/`);
});

tests.forEach(test => {
it(`Should return metadata listing params to list ${test.name}`, () => {
const listing = new DelimiterVersions(test.input, logger, vFormat);
const params = listing.genMDParams();
assert.deepStrictEqual(params, test.genMDParams[vFormat]);
});
it(`Should list ${test.name}`, () => {
// Simulate skip scan done by LevelDB
const d = getTestListing(test, dataVersioned[vFormat], vFormat);
const res = performListing(d, DelimiterVersions, test.input, logger, vFormat);
assert.deepStrictEqual(res, test.output);
});
tests.forEach(test => {
it(`Should list ${test.name}`, done => {
// Simulate skip scan done by LevelDB
const d = dataVersioned.filter(e => test.filter(e, test.input));
const res =
performListing(d, DelimiterVersions, test.input, logger);
assert.deepStrictEqual(res, test.output);
done();
});
});
});
@ -2,10 +2,7 @@

const assert = require('assert');

const { checkLimit, inc, listingParamsMasterKeysV0ToV1 } =
require('../../../../lib/algos/list/tools');
const VSConst = require('../../../../lib/versioning/constants').VersioningConstants;
const { DbPrefixes } = VSConst;
const checkLimit = require('../../../../lib/algos/list/tools').checkLimit;

describe('checkLimit function', () => {
const tests = [
@ -26,79 +23,3 @@ describe('checkLimit function', () => {
});
});
});

describe('listingParamsMasterKeysV0ToV1', () => {
const testCases = [
{
v0params: {},
v1params: {
gte: DbPrefixes.Master,
lt: inc(DbPrefixes.Master),
},
},
{
v0params: {
gt: 'foo/bar',
},
v1params: {
gt: `${DbPrefixes.Master}foo/bar`,
lt: inc(DbPrefixes.Master),
},
},
{
v0params: {
gte: 'foo/bar',
},
v1params: {
gte: `${DbPrefixes.Master}foo/bar`,
lt: inc(DbPrefixes.Master),
},
},
{
v0params: {
lt: 'foo/bar',
},
v1params: {
gte: DbPrefixes.Master,
lt: `${DbPrefixes.Master}foo/bar`,
},
},
{
v0params: {
lte: 'foo/bar',
},
v1params: {
gte: DbPrefixes.Master,
lte: `${DbPrefixes.Master}foo/bar`,
},
},
{
v0params: {
gt: 'baz/qux',
lt: 'foo/bar',
},
v1params: {
gt: `${DbPrefixes.Master}baz/qux`,
lt: `${DbPrefixes.Master}foo/bar`,
},
},
{
v0params: {
gte: 'baz/qux',
lte: 'foo/bar',
limit: 5,
},
v1params: {
gte: `${DbPrefixes.Master}baz/qux`,
lte: `${DbPrefixes.Master}foo/bar`,
limit: 5,
},
},
];
testCases.forEach(({ v0params, v1params }) => {
it(`${JSON.stringify(v0params)} => ${JSON.stringify(v1params)}`, () => {
const converted = listingParamsMasterKeysV0ToV1(v0params);
assert.deepStrictEqual(converted, v1params);
});
});
});
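
/* A sketch of the mapping that the test table above pins down, inferred
 * from these cases only (the real implementation lives in
 * lib/algos/list/tools and may differ): lower bounds (gt/gte) and upper
 * bounds (lt/lte) get the master-key prefix, missing bounds default to
 * the full master-key range, and `limit` passes through: */
function listingParamsMasterKeysV0ToV1Sketch(v0params) {
    const v1params = {};
    if (v0params.gt !== undefined) {
        v1params.gt = `${DbPrefixes.Master}${v0params.gt}`;
    } else if (v0params.gte !== undefined) {
        v1params.gte = `${DbPrefixes.Master}${v0params.gte}`;
    } else {
        v1params.gte = DbPrefixes.Master;
    }
    if (v0params.lt !== undefined) {
        v1params.lt = `${DbPrefixes.Master}${v0params.lt}`;
    } else if (v0params.lte !== undefined) {
        v1params.lte = `${DbPrefixes.Master}${v0params.lte}`;
    } else {
        v1params.lt = inc(DbPrefixes.Master);
    }
    if (v0params.limit !== undefined) {
        v1params.limit = v0params.limit;
    }
    return v1params;
}
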
@ -1,260 +0,0 @@
const assert = require('assert');
const stream = require('stream');
const MergeStream = require('../../../../lib/algos/stream/MergeStream');

class Streamify extends stream.Readable {
constructor(objectsToSend, errorAtEnd) {
super({ objectMode: true });
this._remaining = Array.from(objectsToSend);
this._remaining.reverse();
this._errorAtEnd = errorAtEnd || false;
this._ended = false;
this._destroyed = false;
}

_read() {
process.nextTick(() => {
while (this._remaining.length > 0) {
const item = this._remaining.pop();
if (!this.push(item)) {
return undefined;
}
}
if (this._errorAtEnd) {
return this.emit('error', new Error('OOPS'));
}
this._ended = true;
return this.push(null);
});
}

_destroy(err, callback) {
this._destroyed = true;
callback();
}
}

function readAll(stream, usePauseResume, cb) {
const result = [];
stream.on('data', item => {
result.push(item);
if (usePauseResume) {
stream.pause();
setTimeout(() => stream.resume(), 1);
}
});
stream.once('end', () => cb(null, result));
stream.once('error', err => cb(err));
}

function compareInt(a, b) {
return Math.sign(a - b);
}
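
// Minimal usage sketch, inferred from the tests below: MergeStream takes
// two object-mode readables that are each already sorted under the given
// comparator and emits one merged, sorted stream, e.g.
//     const merged = new MergeStream(
//         new Streamify([1, 3, 5]), new Streamify([2, 4, 6]), compareInt);
//     readAll(merged, false, (err, items) => {
//         // items is [1, 2, 3, 4, 5, 6]
//     });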
|
||||
|
||||
function testMergeStreamWithIntegers(contents1, contents2,
|
||||
usePauseResume, errorAtEnd, cb) {
|
||||
const expectedItems = contents1.concat(contents2).sort(compareInt);
|
||||
const mergeStream = new MergeStream(
|
||||
new Streamify(contents1, errorAtEnd)
|
||||
.on('error', () => {}),
|
||||
new Streamify(contents2)
|
||||
.on('error', () => {}),
|
||||
compareInt);
|
||||
readAll(mergeStream, usePauseResume, (err, readItems) => {
|
||||
if (errorAtEnd) {
|
||||
assert(err);
|
||||
} else {
|
||||
assert.ifError(err);
|
||||
assert.deepStrictEqual(readItems, expectedItems);
|
||||
}
|
||||
cb();
|
||||
});
|
||||
}
|
||||
|
||||
function testCasePretty(testCase, reversed) {
|
||||
const desc1 = JSON.stringify(
|
||||
reversed ? testCase.stream2 : testCase.stream1);
|
||||
const desc2 = JSON.stringify(
|
||||
reversed ? testCase.stream1 : testCase.stream2);
|
||||
return `${desc1} merged with ${desc2}`;
|
||||
}
|
||||
|
||||
describe('MergeStream', () => {
|
||||
[
|
||||
{
|
||||
stream1: [],
|
||||
stream2: [],
|
||||
},
|
||||
{
|
||||
stream1: [0],
|
||||
stream2: [],
|
||||
},
|
||||
{
|
||||
stream1: [0, 1, 2, 3, 4],
|
||||
stream2: [],
|
||||
},
|
||||
{
|
||||
stream1: [0],
|
||||
stream2: [1],
|
||||
},
|
||||
{
|
||||
stream1: [1, 2, 3, 4, 5],
|
||||
stream2: [0],
|
||||
},
|
||||
{
|
||||
stream1: [0, 1, 2, 3, 4],
|
||||
stream2: [5],
|
||||
},
|
||||
{
|
||||
            stream1: [1, 2],
            stream2: [3, 4, 5],
        },
        {
            stream1: [1, 2, 3],
            stream2: [4, 5],
        },
        {
            stream1: [1, 3, 5, 7, 9],
            stream2: [2, 4, 6, 8, 10],
        },
        {
            stream1: [1, 4, 7],
            stream2: [0, 2, 3, 5, 6, 8, 9, 10],
        },
        {
            stream1: [0, 10],
            stream2: [1, 2, 3, 4, 5, 6, 7, 8, 9],
        },
        {
            stream1: [4, 5, 6],
            stream2: [1, 2, 3, 7, 8, 9],
        },
        {
            stream1: [0],
            stream2: [0],
        },
        {
            stream1: [0, 1],
            stream2: [0, 1],
        },
        {
            stream1: [0, 1, 2, 3, 4, 5, 6, 7, 8, 9],
            stream2: [0, 1, 2, 3, 4, 5, 6, 7, 8, 9],
        },
        {
            stream1: [0, 2, 3, 4],
            stream2: [0, 1, 2, 4],
        },
        {
            stream1: [0, 1, 2, 3],
            stream2: [1, 2, 3, 4],
        },
        {
            stream1: [0, 1, 2, 3],
            stream2: [2, 3, 4, 5, 6, 7],
        },
        {
            stream1: [0, 1, 2, 3],
            stream2: [3, 4, 5, 6],
        },
        {
            stream1: [0, 1, 2, 3],
            stream2: [0, 3],
        },
    ].forEach(testCase => {
        [false, true].forEach(usePauseResume => {
            [false, true].forEach(errorAtEnd => {
                const testDesc =
                    `${testCasePretty(testCase, false)}` +
                    `${usePauseResume ? ' with pause/resume' : ''}` +
                    `${errorAtEnd ? ' with error' : ''}`;
                it(`should cover ${testDesc}`, done => {
                    testMergeStreamWithIntegers(
                        testCase.stream1, testCase.stream2,
                        usePauseResume, errorAtEnd, done);
                });
                it(`should cover ${testDesc} (streams swapped)`, done => {
                    testMergeStreamWithIntegers(
                        testCase.stream2, testCase.stream1,
                        usePauseResume, errorAtEnd, done);
                });
            });
        });
    });
    [100, 1000, 10000, 100000].forEach(nbEntries => {
        [false, true].forEach(usePauseResume => {
            [false, true].forEach(errorAtEnd => {
                if ((!usePauseResume && !errorAtEnd) || nbEntries <= 1000) {
                    const fixtureDesc =
                        `${usePauseResume ? ' with pause/resume' : ''}` +
                        `${errorAtEnd ? ' with error' : ''}`;
                    it(`${nbEntries} sequential entries${fixtureDesc}`,
                    function bigMergeSequential(done) {
                        this.timeout(10000);
                        const stream1 = [];
                        const stream2 = [];
                        for (let i = 0; i < nbEntries; ++i) {
                            if (Math.floor(i / (nbEntries / 10)) % 2 === 0) {
                                stream1.push(i);
                            } else {
                                stream2.push(i);
                            }
                        }
                        testMergeStreamWithIntegers(
                            stream1, stream2, usePauseResume, errorAtEnd, done);
                    });
                    it(`${nbEntries} randomly mingled entries${fixtureDesc}`,
                    function bigMergeRandom(done) {
                        this.timeout(10000);
                        const stream1 = [];
                        const stream2 = [];
                        let accu = nbEntries;
                        for (let i = 0; i < nbEntries; ++i) {
                            // picked two large arbitrary prime numbers to get a
                            // deterministic random-looking series
                            accu = (accu * 1592760451) % 8448053;
                            if (accu % 2 === 0) {
                                stream1.push(i);
                            } else {
                                stream2.push(i);
                            }
                        }
                        testMergeStreamWithIntegers(
                            stream1, stream2, usePauseResume, errorAtEnd, done);
                    });
                }
            });
        });
    });
    // with 3 items per input stream, we reach the end of stream even
    // though destroy() has been called (due to buffering), while with
    // 100 items input streams are aborted before emitting the 'end'
    // event, so it's useful to test both cases
    [3, 100].forEach(nbItemsPerStream => {
        it(`destroy() should destroy both inner streams with ${nbItemsPerStream} items per stream`,
        done => {
            const stream1 = new Streamify(new Array(nbItemsPerStream).fill().map((e, i) => 2 * i));
            const stream2 = new Streamify(new Array(nbItemsPerStream).fill().map((e, i) => 1 + 2 * i));
            const mergeStream = new MergeStream(stream1, stream2, compareInt);
            mergeStream.on('data', item => {
                if (item === 5) {
                    mergeStream.destroy();
                    const s1ended = stream1._ended;
                    const s2ended = stream2._ended;
                    setTimeout(() => {
                        if (!s1ended) {
                            assert(stream1._destroyed);
                        }
                        if (!s2ended) {
                            assert(stream2._destroyed);
                        }
                        done();
                    }, 10);
                }
            });
            mergeStream.once('error', err => {
                assert.fail(`unexpected error: ${err.message}`);
            });
        });
    });
});
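All of these fixtures assert the same invariant: merging two individually sorted inputs must produce one globally sorted output, whatever the interleaving. A minimal self-contained sketch of that expectation, with plain arrays standing in for the streams (the names here are illustrative, not MergeStream internals):

const assert = require('assert');

// merge two sorted arrays the way the fixtures expect MergeStream
// to merge two sorted object streams
function mergeSorted(a, b, cmp) {
    const out = [];
    let i = 0;
    let j = 0;
    while (i < a.length && j < b.length) {
        out.push(cmp(a[i], b[j]) <= 0 ? a[i++] : b[j++]);
    }
    return out.concat(a.slice(i), b.slice(j));
}

assert.deepStrictEqual(
    mergeSorted([1, 3, 5, 7, 9], [2, 4, 6, 8, 10], (x, y) => x - y),
    [1, 2, 3, 4, 5, 6, 7, 8, 9, 10]);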
|
|
@ -1,5 +1,4 @@
|
|||
const assert = require('assert');
const sinon = require('sinon');

const queryAuthCheck =
    require('../../../../lib/auth/v2/queryAuthCheck').check;
|
@ -27,97 +26,3 @@ describe('v2: queryAuthCheck', () => {
|
|||
        }
    }));
});

describe('v2: queryAuthCheck', () => {
    let clock;

    beforeEach(() => {
        clock = sinon.useFakeTimers();
    });
    afterEach(() => {
        process.env.PRE_SIGN_URL_EXPIRY = 604800000;
        clock.restore();
    });
    it('URL should not expire before 7 days with default expiry', () => {
        const currentTime = Date.now() / 1000;
        const expires = currentTime + 604799; // in seconds
        const mockRequest = {
            method: 'GET',
            url: 'mockurl',
            query: {
                Expires: expires,
            },
            headers: {
                'Content-MD5': 'c',
            },
        };
        const data = {
            Expires: expires,
            AWSAccessKeyId: 'keyId',
            Signature: 'sign',
        };
        const res = queryAuthCheck(mockRequest, log, data);
        assert.notStrictEqual(res.err.AccessDenied, true);
        assert.notStrictEqual(res.err.RequestTimeTooSkewed, true);
    });
    it('URL should expire after 7 days with default expiry', () => {
        clock.tick(604800000); // take time 604800000ms (7 days) ahead
        const currentTime = Date.now();
        const request = { method: 'GET', query: { Expires: currentTime } };
        const data = { Expires: currentTime };
        const res = queryAuthCheck(request, log, data);
        assert.notStrictEqual(res.err, null);
        assert.notStrictEqual(res.err, undefined);
        assert.strictEqual(res.err.AccessDenied, true);
    });
    it('URL should not expire before 7 days with custom expiry', () => {
        process.env.PRE_SIGN_URL_EXPIRY = 31556952000; // in ms (1 year)
        const currentTime = Date.now() / 1000;
        const expires = currentTime + 604799; // in seconds
        const mockRequest = {
            method: 'GET',
            url: 'mockurl',
            query: {
                Expires: expires,
            },
            headers: {
                'Content-MD5': 'c',
            },
        };
        const data = {
            Expires: expires,
            AWSAccessKeyId: 'keyId',
            Signature: 'sign',
        };
        const res = queryAuthCheck(mockRequest, log, data);
        assert.notStrictEqual(res.err.AccessDenied, true);
        assert.notStrictEqual(res.err.RequestTimeTooSkewed, true);
    });
    it('URL should still not expire after 7 days with custom expiry', () => {
        clock.tick(604800000); // take time 604800000ms (7 days) ahead
        process.env.PRE_SIGN_URL_EXPIRY = 31556952000; // in ms (1 year)
        const currentTime = Date.now() / 1000;
        const request = { method: 'GET', query: { Expires: currentTime } };
        const data = { Expires: currentTime };
        const res = queryAuthCheck(request, log, data);
        assert.notStrictEqual(res.err.AccessDenied, true);
    });
    it('should return RequestTimeTooSkewed with current time > expiry', () => {
        clock.tick(123);
        const expires = 0;
        const request = { method: 'GET', query: { Expires: expires } };
        const data = { Expires: expires };
        const res = queryAuthCheck(request, log, data);
        assert.notStrictEqual(res.err, null);
        assert.notStrictEqual(res.err, undefined);
        assert.strictEqual(res.err.RequestTimeTooSkewed, true);
    });
    it('should return MissingSecurityHeader with invalid expires param', () => {
        const request = { method: 'GET', query: { Expires: 'a string' } };
        const data = { Expires: 'a string' };
        const res = queryAuthCheck(request, log, data);
        assert.notStrictEqual(res.err, null);
        assert.notStrictEqual(res.err, undefined);
        assert.strictEqual(res.err.MissingSecurityHeader, true);
    });
});
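Read together, these cases pin down the expiry rule: the query-string Expires parameter is an epoch time in seconds, a URL is rejected as RequestTimeTooSkewed once that time has passed, and as AccessDenied when its remaining lifetime exceeds the cap (7 days by default, overridable in milliseconds through PRE_SIGN_URL_EXPIRY). A hedged sketch of such a check; the function name and return shape are illustrative, not the module's actual internals:

function checkPresignedExpiry(expiresSeconds, nowMs,
    maxLifetimeMs = 604800000) {
    const expiresMs = expiresSeconds * 1000;
    if (Number.isNaN(expiresMs)) {
        return 'MissingSecurityHeader'; // unparseable Expires param
    }
    if (expiresMs < nowMs) {
        return 'RequestTimeTooSkewed'; // already past the deadline
    }
    if (expiresMs - nowMs > maxLifetimeMs) {
        return 'AccessDenied'; // lifetime longer than the configured cap
    }
    return null; // still valid
}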
|
|
|
@ -127,17 +127,8 @@ const testBucketPolicy = {
|
|||
        },
    ],
};

const testobjectLockEnabled = false;

const testObjectLockConfiguration = {
    rule: {
        mode: 'GOVERNANCE',
        days: 1,
    },
};

// create a dummy bucket to test getters and setters

Object.keys(acl).forEach(
    aclObj => describe(`different acl configurations : ${aclObj}`, () => {
        const dummyBucket = new BucketInfo(
|
@ -154,9 +145,7 @@ Object.keys(acl).forEach(
|
|||
            testCorsConfiguration,
            testReplicationConfiguration,
            testLifecycleConfiguration,
            testBucketPolicy,
            testobjectLockEnabled,
            testObjectLockConfiguration);
            testBucketPolicy);

        describe('serialize/deSerialize on BucketInfo class', () => {
            const serialized = dummyBucket.serialize();
|
@ -183,9 +172,6 @@ Object.keys(acl).forEach(
|
|||
                    lifecycleConfiguration:
                        dummyBucket._lifecycleConfiguration,
                    bucketPolicy: dummyBucket._bucketPolicy,
                    objectLockEnabled: dummyBucket._objectLockEnabled,
                    objectLockConfiguration:
                        dummyBucket._objectLockConfiguration,
                };
                assert.strictEqual(serialized, JSON.stringify(bucketInfos));
                done();
|
@ -202,16 +188,15 @@ Object.keys(acl).forEach(
|
|||
        });

        describe('constructor', () => {
            it('this should have the right BucketInfo types', () => {
                assert.strictEqual(typeof dummyBucket.getName(), 'string');
                assert.strictEqual(typeof dummyBucket.getOwner(), 'string');
                assert.strictEqual(typeof dummyBucket.getOwnerDisplayName(),
                    'string');
                assert.strictEqual(typeof dummyBucket.getCreationDate(),
                    'string');
                assert.strictEqual(typeof dummyBucket.isObjectLockEnabled(),
                    'boolean');
            });
            it('this should have the right BucketInfo types',
            () => {
                assert.strictEqual(typeof dummyBucket.getName(), 'string');
                assert.strictEqual(typeof dummyBucket.getOwner(), 'string');
                assert.strictEqual(typeof dummyBucket.getOwnerDisplayName(),
                    'string');
                assert.strictEqual(typeof dummyBucket.getCreationDate(),
                    'string');
            });
            it('this should have the right acl\'s types', () => {
                assert.strictEqual(typeof dummyBucket.getAcl(), 'object');
                assert.strictEqual(
|
@ -290,14 +275,6 @@ Object.keys(acl).forEach(
|
|||
                assert.deepStrictEqual(
                    dummyBucket.getBucketPolicy(), testBucketPolicy);
            });
            it('object lock should be disabled by default', () => {
                assert.deepStrictEqual(
                    dummyBucket.isObjectLockEnabled(), false);
            });
            it('getObjectLockConfiguration should return configuration', () => {
                assert.deepStrictEqual(dummyBucket.getObjectLockConfiguration(),
                    testObjectLockConfiguration);
            });
        });

        describe('setters on BucketInfo class', () => {
|
@ -435,25 +412,6 @@ Object.keys(acl).forEach(
|
|||
                assert.deepStrictEqual(
                    dummyBucket.getBucketPolicy(), newBucketPolicy);
            });
            it('setObjectLockConfiguration should set object lock ' +
            'configuration', () => {
                const newObjectLockConfig = {
                    rule: {
                        mode: 'COMPLIANCE',
                        years: 1,
                    },
                };
                dummyBucket.setObjectLockConfiguration(newObjectLockConfig);
                assert.deepStrictEqual(dummyBucket.getObjectLockConfiguration(),
                    newObjectLockConfig);
            });
            [true, false].forEach(bool => {
                it('setObjectLockEnabled should set object lock status', () => {
                    dummyBucket.setObjectLockEnabled(bool);
                    assert.deepStrictEqual(dummyBucket.isObjectLockEnabled(),
                        bool);
                });
            });
        });
    })
);
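The serialize assertion above relies on BucketInfo.serialize() being plain JSON of the instance fields, which is why a strictEqual against JSON.stringify of the expected shape suffices. A minimal illustration of that round-trip contract, with an abridged, illustrative field set:

const assert = require('assert');

const bucketInfos = {
    name: 'dummy-bucket',
    objectLockEnabled: false,
    objectLockConfiguration: { rule: { mode: 'GOVERNANCE', days: 1 } },
};
// serializing and parsing back must preserve every field
const serialized = JSON.stringify(bucketInfos);
assert.deepStrictEqual(JSON.parse(serialized), bucketInfos);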
|
|
|
@ -48,15 +48,6 @@ describe('BucketPolicy class getBucketPolicy', () => {
|
|||
        done();
    });

    it('should return MalformedPolicy error if request action is for objects ' +
    'but doesn\'t include \'Object\' and resource refers to bucket', done => {
        const newPolicy = createPolicy('Action', 's3:AbortMultipartUpload');
        const bucketPolicy = new BucketPolicy(JSON.stringify(newPolicy))
            .getBucketPolicy();
        checkErr(bucketPolicy, 'MalformedPolicy', mismatchErr);
        done();
    });

    it('should return MalformedPolicy error if request action is for objects ' +
    '(with wildcard) but resource refers to bucket', done => {
        const newPolicy = createPolicy('Action', 's3:GetObject*');
|
|
|
@ -1,265 +0,0 @@
|
|||
const assert = require('assert');
const { parseString } = require('xml2js');

const ObjectLockConfiguration =
    require('../../../lib/models/ObjectLockConfiguration.js');

function checkError(parsedXml, err, errMessage, cb) {
    const config = new ObjectLockConfiguration(parsedXml).
        getValidatedObjectLockConfiguration();
    assert.strictEqual(config.error[err], true);
    assert.strictEqual(config.error.description, errMessage);
    cb();
}

function generateRule(testParams) {
    if (testParams.key === 'Rule') {
        return `<Rule>${testParams.value}</Rule>`;
    }
    if (testParams.key === 'DefaultRetention') {
        return `<Rule><DefaultRetention>${testParams.value} ` +
            '</DefaultRetention></Rule>';
    }
    const mode = testParams.key === 'Mode' ?
        `<Mode>${testParams.value}</Mode>` : '<Mode>GOVERNANCE</Mode>';

    let time = '<Days>1</Days>';
    if (testParams.key === 'Days') {
        time = `<Days>${testParams.value}</Days>`;
    }
    if (testParams.key === 'Years') {
        time = `<Years>${testParams.value}</Years>`;
    }
    if (testParams.key === 'NoRule') {
        return '';
    }
    return `<Rule><DefaultRetention>${mode}${time}</DefaultRetention></Rule>`;
}

function generateXml(testParams) {
    const Enabled = testParams.key === 'ObjectLockEnabled' ?
        `<ObjectLockEnabled>${testParams.value}</ObjectLockEnabled>` :
        '<ObjectLockEnabled>Enabled</ObjectLockEnabled>';
    const Rule = generateRule(testParams);
    const ObjectLock = testParams.key === 'ObjectLockConfiguration' ? '' :
        `<ObjectLockConfiguration>${Enabled}${Rule}` +
        '</ObjectLockConfiguration>';
    return ObjectLock;
}

function generateParsedXml(testParams, cb) {
    const xml = generateXml(testParams);
    parseString(xml, (err, parsedXml) => {
        assert.equal(err, null, 'Error parsing xml');
        cb(parsedXml);
    });
}

const expectedXml = (daysOrYears, time, mode) =>
    '<?xml version="1.0" encoding="UTF-8"?>' +
    '<ObjectLockConfiguration ' +
    'xmlns="http://s3.amazonaws.com/doc/2006-03-01/">' +
    '<ObjectLockEnabled>Enabled</ObjectLockEnabled>' +
    '<Rule><DefaultRetention>' +
    `<Mode>${mode}</Mode>` +
    `<${daysOrYears}>${time}</${daysOrYears}>` +
    '</DefaultRetention></Rule>' +
    '</ObjectLockConfiguration>';

const failTests = [
    {
        name: 'fail with empty configuration',
        params: { key: 'ObjectLockConfiguration' },
        error: 'MalformedXML',
        errorMessage: 'request xml is undefined or empty',
    },
    {
        name: 'fail with empty ObjectLockEnabled',
        params: { key: 'ObjectLockEnabled', value: '' },
        error: 'MalformedXML',
        errorMessage: 'request xml does not include valid ObjectLockEnabled',
    },
    {
        name: 'fail with invalid value for ObjectLockEnabled',
        params: { key: 'ObjectLockEnabled', value: 'Disabled' },
        error: 'MalformedXML',
        errorMessage: 'request xml does not include valid ObjectLockEnabled',
    },
    {
        name: 'fail with empty rule',
        params: { key: 'Rule', value: '' },
        error: 'MalformedXML',
        errorMessage: 'Rule request xml does not contain DefaultRetention',
    },
    {
        name: 'fail with empty DefaultRetention',
        params: { key: 'DefaultRetention', value: '' },
        error: 'MalformedXML',
        errorMessage: 'DefaultRetention request xml does not contain Mode or ' +
            'retention period (Days or Years)',
    },
    {
        name: 'fail with empty mode',
        params: { key: 'Mode', value: '' },
        error: 'MalformedXML',
        errorMessage: 'request xml does not contain Mode',
    },
    {
        name: 'fail with invalid mode',
        params: { key: 'Mode', value: 'COMPLOVERNANCE' },
        error: 'MalformedXML',
        errorMessage: 'Mode request xml must be one of "GOVERNANCE", ' +
            '"COMPLIANCE"',
    },
    {
        name: 'fail with lowercase mode',
        params: { key: 'Mode', value: 'governance' },
        error: 'MalformedXML',
        errorMessage: 'Mode request xml must be one of "GOVERNANCE", ' +
            '"COMPLIANCE"',
    },
    {
        name: 'fail with empty retention period',
        params: { key: 'Days', value: '' },
        error: 'MalformedXML',
        errorMessage: 'request xml does not contain Days or Years',
    },
    {
        name: 'fail with NaN retention period',
        params: { key: 'Days', value: 'one' },
        error: 'MalformedXML',
        errorMessage: 'request xml does not contain valid retention period',
    },
    {
        name: 'fail with retention period less than 1',
        params: { key: 'Days', value: 0 },
        error: 'InvalidArgument',
        errorMessage: 'retention period must be a positive integer',
    },
    {
        name: 'fail with Days retention period greater than 36500',
        params: { key: 'Days', value: 36501 },
        error: 'InvalidArgument',
        errorMessage: 'retention period is too large',
    },
    {
        name: 'fail with Years retention period greater than 100',
        params: { key: 'Years', value: 101 },
        error: 'InvalidArgument',
        errorMessage: 'retention period is too large',
    },
];

const passTests = [
    {
        name: 'pass with GOVERNANCE retention mode and valid Days ' +
            'retention period',
        params: {},
    },
    {
        name: 'pass with COMPLIANCE retention mode',
        params: { key: 'Mode', value: 'COMPLIANCE' },
    },
    {
        name: 'pass with valid Years retention period',
        params: { key: 'Years', value: 1 },
    },
    {
        name: 'pass without Rule',
        params: { key: 'NoRule' },
    },
];

const passTestsGetConfigXML = [
    {
        config: {
            rule: {
                mode: 'COMPLIANCE',
                days: 90,
            },
        },
        expectedXml: expectedXml('Days', 90, 'COMPLIANCE'),
        description: 'with COMPLIANCE retention mode ' +
            'and valid Days retention period',
    },
    {
        config: {
            rule: {
                mode: 'GOVERNANCE',
                days: 30,
            },
        },
        expectedXml: expectedXml('Days', 30, 'GOVERNANCE'),
        description: 'with GOVERNANCE retention mode ' +
            'and valid Days retention period',
    },
    {
        config: {
            rule: {
                mode: 'COMPLIANCE',
                years: 1,
            },
        },
        expectedXml: expectedXml('Years', 1, 'COMPLIANCE'),
        description: 'with COMPLIANCE retention mode ' +
            'and valid Years retention period',
    },
    {
        config: {
            rule: {
                mode: 'GOVERNANCE',
                years: 2,
            },
        },
        expectedXml: expectedXml('Years', 2, 'GOVERNANCE'),
        description: 'with GOVERNANCE retention mode ' +
            'and valid Years retention period',
    },
    {
        config: {},
        expectedXml: '<?xml version="1.0" encoding="UTF-8"?>' +
            '<ObjectLockConfiguration ' +
            'xmlns="http://s3.amazonaws.com/doc/2006-03-01/">' +
            '<ObjectLockEnabled>Enabled</ObjectLockEnabled>' +
            '</ObjectLockConfiguration>',
        description: 'without rule if object lock ' +
            'configuration has not been set',
    },
];

describe('ObjectLockConfiguration class getValidatedObjectLockConfiguration',
() => {
    it('should return MalformedXML error if request xml is empty', done => {
        const errMessage = 'request xml is undefined or empty';
        checkError('', 'MalformedXML', errMessage, done);
    });

    failTests.forEach(test => {
        it(`should ${test.name}`, done => {
            generateParsedXml(test.params, xml => {
                checkError(xml, test.error, test.errorMessage, done);
            });
        });
    });

    passTests.forEach(test => {
        it(`should ${test.name}`, done => {
            generateParsedXml(test.params, xml => {
                const config = new ObjectLockConfiguration(xml).
                    getValidatedObjectLockConfiguration();
                assert.ifError(config.error);
                done();
            });
        });
    });
});

describe('ObjectLockConfiguration class getConfigXML', () => {
    passTestsGetConfigXML.forEach(test => {
        const { config, description, expectedXml } = test;
        it(`should return correct XML ${description}`, () => {
            const responseXml = ObjectLockConfiguration.getConfigXML(config);
            assert.strictEqual(responseXml, expectedXml);
        });
    });
});
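The failTests table above encodes the validation bounds in one place: mode must be GOVERNANCE or COMPLIANCE, and the retention period must be a positive integer of at most 36500 days or 100 years. Restated as a standalone sketch (an approximation of the behavior, not the model's actual code):

function validateDefaultRetention(rule) {
    if (rule.mode !== 'GOVERNANCE' && rule.mode !== 'COMPLIANCE') {
        return new Error('Mode must be one of "GOVERNANCE", "COMPLIANCE"');
    }
    // exactly one of days/years carries the retention period
    const period = rule.days !== undefined ? rule.days : rule.years;
    if (!Number.isInteger(period) || period < 1) {
        return new Error('retention period must be a positive integer');
    }
    if (period > (rule.days !== undefined ? 36500 : 100)) {
        return new Error('retention period is too large');
    }
    return null;
}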
|
|
@ -2,11 +2,6 @@ const assert = require('assert');
|
|||
const ObjectMD = require('../../../lib/models/ObjectMD');
const constants = require('../../../lib/constants');

const retainDate = new Date();
retainDate.setDate(retainDate.getDate() + 1);
const laterDate = new Date();
laterDate.setDate(laterDate.getDate() + 5);

describe('ObjectMD class setters/getters', () => {
    let md = null;

|
@ -65,11 +60,11 @@ describe('ObjectMD class setters/getters', () => {
|
|||
        ['Key', 'key'],
        ['Location', null, []],
        ['Location', ['location1']],
        ['IsNull', null, false],
        ['IsNull', null, ''],
        ['IsNull', true],
        ['NullVersionId', null, undefined],
        ['NullVersionId', null, ''],
        ['NullVersionId', '111111'],
        ['IsDeleteMarker', null, false],
        ['IsDeleteMarker', null, ''],
        ['IsDeleteMarker', true],
        ['VersionId', null, undefined],
        ['VersionId', '111111'],
|
@ -104,10 +99,6 @@ describe('ObjectMD class setters/getters', () => {
|
|||
            dataStoreVersionId: '',
        }],
        ['DataStoreName', null, ''],
        ['LegalHold', null, false],
        ['LegalHold', true],
        ['RetentionMode', 'GOVERNANCE'],
        ['RetentionDate', retainDate.toISOString()],
    ].forEach(test => {
        const property = test[0];
        const testValue = test[1];
|
@ -201,16 +192,6 @@ describe('ObjectMD class setters/getters', () => {
|
|||
        assert.strictEqual(
            md.getReplicationSiteDataStoreVersionId('zenko'), 'a');
    });

    it('ObjectMD::set/getRetentionMode', () => {
        md.setRetentionMode('COMPLIANCE');
        assert.deepStrictEqual(md.getRetentionMode(), 'COMPLIANCE');
    });

    it('ObjectMD::set/getRetentionDate', () => {
        md.setRetentionDate(laterDate.toISOString());
        assert.deepStrictEqual(md.getRetentionDate(), laterDate.toISOString());
    });
});

describe('ObjectMD import from stored blob', () => {
|
|
|
@ -180,39 +180,4 @@ describe('network.Server: ', () => {
|
|||
            res.end('done');
        }).start();
    });

    it('should automatically close idle connections with setKeepAliveTimeout()', done => {
        const ws = new Server(3000, log);
        ws.setKeepAliveTimeout(1000);
        ws.onError(done).onListening(() => {
            const options = {
                hostname: '127.0.0.1',
                port: 3000,
                path: '/',
                agent: new http.Agent({ keepAlive: true }),
            };
            const req = http.request(options, res => {
                res.on('data', () => {});
                res.on('end', () => {});
            });
            req.on('error', err => {
                assert.ifError(err);
            });
            req.end();
        }).onRequest((req, res) => {
            res.writeHead(200, { 'Content-Type': 'text/plain' });
            res.end();
            assert.strictEqual(ws._server._connections, 1);
            setTimeout(() => {
                // client connection should remain open after less than 1000ms
                assert.strictEqual(ws._server._connections, 1);
                setTimeout(() => {
                    // client connection should have been closed after more than 1000ms
                    assert.strictEqual(ws._server._connections, 0);
                    ws.stop();
                    ws.onStop(done);
                }, 200);
            }, 900);
        }).start();
    });
});
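The timing assertions above (a check at 900ms, then another at +200ms, around the 1000ms timeout) exercise ordinary Node keep-alive behavior. A minimal sketch using only the standard http module, assuming nothing about the Server wrapper: idle kept-alive sockets are torn down once keepAliveTimeout elapses.

const http = require('http');

const server = http.createServer((req, res) => res.end('done'));
server.keepAliveTimeout = 1000; // close idle kept-alive sockets after 1s
server.listen(0, () => {
    console.log(`listening on ${server.address().port}`);
});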
|
|
|
@ -1,64 +0,0 @@
|
|||
const assert = require('assert');
const DummyRequest = require('../../utils/DummyRequest');
const requestUtils = require('../../../lib/policyEvaluator/requestUtils');

describe('requestUtils.getClientIp', () => {
    // s3 config with `requests.viaProxy` enabled
    const configWithProxy
        = require('../../utils/dummyS3ConfigProxy.json');
    // s3 config with `requests.viaProxy` disabled
    const configWithoutProxy = require('../../utils/dummyS3Config.json');
    const testClientIp1 = '192.168.100.1';
    const testClientIp2 = '192.168.104.0';
    const testProxyIp = '192.168.100.2';

    it('should return client Ip address from header ' +
    'if the request comes via proxies', () => {
        const request = new DummyRequest({
            headers: {
                'x-forwarded-for': [testClientIp1, testProxyIp].join(','),
            },
            url: '/',
            parsedHost: 'localhost',
            socket: {
                remoteAddress: testProxyIp,
            },
        });
        const result = requestUtils.getClientIp(request, configWithProxy);
        assert.strictEqual(result, testClientIp1);
    });

    it('should not return client Ip address from header ' +
    'if the request is not forwarded from proxies or ' +
    'fails ip check', () => {
        const request = new DummyRequest({
            headers: {
                'x-forwarded-for': [testClientIp1, testProxyIp].join(','),
            },
            url: '/',
            parsedHost: 'localhost',
            socket: {
                remoteAddress: testClientIp2,
            },
        });
        const result = requestUtils.getClientIp(request, configWithoutProxy);
        assert.strictEqual(result, testClientIp2);
    });

    it('should not return client Ip address from header ' +
    'if the request is forwarded from proxies, but the request ' +
    'has no expected header or the header value is empty', () => {
        const request = new DummyRequest({
            headers: {
                'x-forwarded-for': ' ',
            },
            url: '/',
            parsedHost: 'localhost',
            socket: {
                remoteAddress: testClientIp2,
            },
        });
        const result = requestUtils.getClientIp(request, configWithProxy);
        assert.strictEqual(result, testClientIp2);
    });
});
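An illustrative sketch of the behavior these three cases describe, under the same assumptions as the proxy configs below: when requests.viaProxy is enabled and the socket address is a trusted proxy, the client IP is the first entry of the header named by extractClientIPFromHeader; otherwise the socket address wins. CIDR matching is stubbed out as a parameter for brevity; none of this is requestUtils' actual source.

function getClientIpSketch(request, config, isTrustedProxy) {
    const fromSocket = request.socket.remoteAddress;
    if (!config.requests || !config.requests.viaProxy
        || !isTrustedProxy(fromSocket)) {
        return fromSocket;
    }
    const headerName = config.requests.extractClientIPFromHeader;
    const header = (request.headers[headerName] || '').trim();
    // fall back to the socket address when the header is missing/empty
    return header ? header.split(',')[0].trim() : fromSocket;
}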
|
|
@ -1,94 +0,0 @@
|
|||
const assert = require('assert');
const checkArnMatch
    = require('../../../../lib/policyEvaluator/utils/checkArnMatch');

const tests = [
    {
        policyArn: 'arn:aws:iam::*:policy/1236-Ng',
        requestArn: 'arn:aws:iam::005978442556:policy/1236-Ng',
        caseSensitive: true,
        isMatch: true,
    },
    {
        policyArn: 'arn:aws:iam::*:policy/1236-Ng',
        requestArn: 'arn:aws:iam::005978442556:policy/1236-Ng',
        caseSensitive: false,
        isMatch: true,
    },
    {
        policyArn: 'arn:aws:iam::*:policy/1236-Ng',
        requestArn: 'arn:aws:iam::005978442556:policy/1236-ng',
        caseSensitive: true,
        isMatch: false,
    },
    {
        policyArn: 'arn:aws:iam::*:policy/1236-Ng',
        requestArn: 'arn:aws:iam::005978442556:policy/1236-ng',
        caseSensitive: false,
        isMatch: true,
    },
    {
        policyArn: 'arn:aws:iam::005978442556:policy/1236-Ng',
        requestArn: 'arn:aws:iam::005978442556:policy/1236-Ng',
        caseSensitive: true,
        isMatch: true,
    },
    {
        policyArn: 'arn:aws:iam::005978442556:policy/1236-Ng',
        requestArn: 'arn:aws:iam::005978442556:policy/1236-Ng',
        caseSensitive: false,
        isMatch: true,
    },
    {
        policyArn: 'arn:aws:iam::005978442556:policy/1236-Ng',
        requestArn: 'arn:aws:iam::005978442556:policy/1236-ng',
        caseSensitive: true,
        isMatch: false,
    },
    {
        policyArn: 'arn:aws:iam::005978442556:policy/1236-Ng',
        requestArn: 'arn:aws:iam::005978442556:policy/1236-ng',
        caseSensitive: false,
        isMatch: true,
    },
    {
        policyArn: 'arn:aws:iam::005978442556:policy/1236-Ng',
        requestArn: 'arn:aws:iam::005978442557:policy/1236-Ng',
        caseSensitive: true,
        isMatch: false,
    },
    {
        policyArn: 'arn:aws:iam::005978442556:policy/1236-Ng',
        requestArn: 'arn:aws:iam::005978442557:policy/1236-Ng',
        caseSensitive: false,
        isMatch: false,
    },
    {
        policyArn: 'arn:aws:iam::005978442556:policy/1236-Ng',
        requestArn: 'arn:aws:iam::005978442557:policy/1236-ng',
        caseSensitive: true,
        isMatch: false,
    },
    {
        policyArn: 'arn:aws:iam::005978442556:policy/1236-Ng',
        requestArn: 'arn:aws:iam::005978442557:policy/1236-ng',
        caseSensitive: false,
        isMatch: false,
    },
];

describe('policyEvaluator checkArnMatch utility function', () => {
    tests.forEach(test => {
        it(`Check '${test.requestArn}' against '${test.policyArn}' with case ` +
        `sensitive check ${test.caseSensitive ? 'enabled' : 'disabled'} ` +
        `and it should ${test.isMatch ? 'be' : 'not be'} a match`, () => {
            const requestArn = test.requestArn;
            const requestResourceArr = requestArn.split(':');
            const requestRelativeId = requestResourceArr.slice(5).join(':');
            const caseSensitive = test.caseSensitive;
            const result = checkArnMatch(test.policyArn, requestRelativeId,
                requestResourceArr, caseSensitive);
            assert.deepStrictEqual(result, test.isMatch);
        });
    });
});
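A compact matcher that reproduces the table's semantics (an assumption about the behavior, not checkArnMatch's actual source, which as called above also takes the pre-split resource array): '*' in the policy ARN spans any characters, '?' matches exactly one, and disabling case sensitivity only relaxes letter case, so mismatched account ids still fail.

function arnMatches(policyArn, requestArn, caseSensitive) {
    const pattern = policyArn
        .replace(/[.+^${}()|[\]\\]/g, '\\$&') // escape regex metacharacters
        .replace(/\*/g, '.*')
        .replace(/\?/g, '.');
    return new RegExp(`^${pattern}$`, caseSensitive ? '' : 'i')
        .test(requestArn);
}

// mirrors two rows of the table above
console.assert(arnMatches('arn:aws:iam::*:policy/1236-Ng',
    'arn:aws:iam::005978442556:policy/1236-Ng', true) === true);
console.assert(arnMatches('arn:aws:iam::005978442556:policy/1236-Ng',
    'arn:aws:iam::005978442557:policy/1236-ng', false) === false);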
|
|
@ -1,92 +0,0 @@
|
|||
const assert = require('assert');
const { convertToXml, parseLegalHoldXml } =
    require('../../../lib/s3middleware/objectLegalHold');
const DummyRequestLogger = require('../helpers').DummyRequestLogger;

const log = new DummyRequestLogger();

const failTests = [
    {
        description: 'should fail with empty status',
        params: { status: '' },
        error: 'MalformedXML',
        errMessage: 'request xml does not contain Status',
    },
    {
        description: 'should fail with invalid status "on"',
        params: { status: 'on' },
        error: 'MalformedXML',
        errMessage: 'Status request xml must be one of "ON", "OFF"',
    },
    {
        description: 'should fail with invalid status "On"',
        params: { status: 'On' },
        error: 'MalformedXML',
        errMessage: 'Status request xml must be one of "ON", "OFF"',
    },
    {
        description: 'should fail with invalid status "off"',
        params: { status: 'off' },
        error: 'MalformedXML',
        errMessage: 'Status request xml must be one of "ON", "OFF"',
    },
    {
        description: 'should fail with invalid status "Off"',
        params: { status: 'Off' },
        error: 'MalformedXML',
        errMessage: 'Status request xml must be one of "ON", "OFF"',
    },
];

const generateXml = status =>
    '<?xml version="1.0" encoding="UTF-8" standalone="yes"?>' +
    `<LegalHold><Status>${status}</Status></LegalHold>`;

describe('object legal hold helpers: parseLegalHoldXml', () => {
    failTests.forEach(test => {
        it(test.description, done => {
            const status = test.params.status;
            parseLegalHoldXml(generateXml(status), log, err => {
                assert(err[test.error]);
                assert.strictEqual(err.description, test.errMessage);
                done();
            });
        });
    });

    it('should pass with legal hold status "ON"', done => {
        parseLegalHoldXml(generateXml('ON'), log, (err, result) => {
            assert.ifError(err);
            assert.strictEqual(result, true);
            done();
        });
    });

    it('should pass with legal hold status "OFF"', done => {
        parseLegalHoldXml(generateXml('OFF'), log, (err, result) => {
            assert.ifError(err);
            assert.strictEqual(result, false);
            done();
        });
    });
});

describe('object legal hold helpers: convertToXml', () => {
    it('should return correct xml when legal hold status "ON"', () => {
        const xml = convertToXml(true);
        const expectedXml = generateXml('ON');
        assert.strictEqual(xml, expectedXml);
    });

    it('should return correct xml when legal hold status "OFF"', () => {
        const xml = convertToXml(false);
        const expectedXml = generateXml('OFF');
        assert.strictEqual(xml, expectedXml);
    });

    it('should return empty string when legal hold not set', () => {
        const xml = convertToXml(undefined);
        const expectedXml = '';
        assert.strictEqual(xml, expectedXml);
    });
});
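The status grammar these cases pin down is deliberately strict: only the exact uppercase strings are legal, mapping to a boolean legal-hold flag. As a one-function sketch (illustrative, not the middleware's code):

function parseLegalHoldStatus(status) {
    if (status === 'ON') {
        return true;
    }
    if (status === 'OFF') {
        return false;
    }
    return new Error('Status request xml must be one of "ON", "OFF"');
}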
|
|
@ -1,127 +0,0 @@
|
|||
const assert = require('assert');
const {
    parseRetentionXml,
    convertToXml,
} = require('../../../lib/s3middleware/objectRetention');
const DummyRequestLogger = require('../helpers').DummyRequestLogger;

const log = new DummyRequestLogger();

const date = new Date();
date.setDate(date.getDate() + 1);
const failDate = new Date('05/01/2020');
const passDate = new Date();
passDate.setDate(passDate.getDate() + 2);

function buildXml(key, value) {
    const mode = key === 'Mode' ?
        `<Mode>${value}</Mode>` :
        '<Mode>GOVERNANCE</Mode>';
    const retainDate = key === 'RetainDate' ?
        `<RetainUntilDate>${value}</RetainUntilDate>` :
        `<RetainUntilDate>${date}</RetainUntilDate>`;
    const retention = key === 'Retention' ?
        `<Retention>${value}</Retention>` :
        `<Retention>${mode}${retainDate}</Retention>`;
    return retention;
}

const expectedRetention = {
    mode: 'GOVERNANCE',
    date: passDate.toISOString(),
};

const expectedXml =
    '<Retention xmlns="http://s3.amazonaws.com/doc/2006-03-01/">' +
    '<Mode>GOVERNANCE</Mode>' +
    `<RetainUntilDate>${passDate.toISOString()}</RetainUntilDate>` +
    '</Retention>';

const failTests = [
    {
        name: 'should fail with empty retention',
        params: { key: 'Retention', value: '' },
        error: 'MalformedXML',
        errMessage: 'request xml does not contain Retention',
    },
    {
        name: 'should fail with empty mode',
        params: { key: 'Mode', value: '' },
        error: 'MalformedXML',
        errMessage: 'request xml does not contain Mode',
    },
    {
        name: 'should fail with empty retain until date',
        params: { key: 'RetainDate', value: '' },
        error: 'MalformedXML',
        errMessage: 'request xml does not contain RetainUntilDate',
    },
    {
        name: 'should fail with invalid mode',
        params: { key: 'Mode', value: 'GOVERPLIANCE' },
        error: 'MalformedXML',
        errMessage: 'Mode request xml must be one of "GOVERNANCE", ' +
            '"COMPLIANCE"',
    },
    {
        name: 'should fail with retain until date in UTC format',
        params: { key: 'RetainDate', value: `${date.toUTCString()}` },
        error: 'InvalidRequest',
        errMessage: 'RetainUntilDate timestamp must be ISO-8601 format',
    },
    {
        name: 'should fail with retain until date in GMT format',
        params: { key: 'RetainDate', value: `${date.toString()}` },
        error: 'InvalidRequest',
        errMessage: 'RetainUntilDate timestamp must be ISO-8601 format',
    },
    {
        name: 'should fail with retain until date in past',
        params: { key: 'RetainDate', value: failDate.toISOString() },
        error: 'InvalidRequest',
        errMessage: 'RetainUntilDate must be in the future',
    },
];

describe('object Retention validation', () => {
    failTests.forEach(t => {
        it(t.name, done => {
            parseRetentionXml(buildXml(t.params.key, t.params.value), log,
                err => {
                    assert(err[t.error]);
                    assert.strictEqual(err.description, t.errMessage);
                    done();
                });
        });
    });

    it('should pass with valid retention', done => {
        parseRetentionXml(buildXml('RetainDate', passDate.toISOString()), log,
            (err, result) => {
                assert.ifError(err);
                assert.deepStrictEqual(result, expectedRetention);
                done();
            });
    });
});

describe('object Retention xml', () => {
    it('should return empty string if no retention date', done => {
        const xml = convertToXml('GOVERNANCE', '');
        assert.equal(xml, '');
        done();
    });

    it('should return empty string if no retention mode', done => {
        const xml = convertToXml('', passDate.toISOString());
        assert.equal(xml, '');
        done();
    });

    it('should return xml string', done => {
        const xml = convertToXml('GOVERNANCE', passDate.toISOString());
        assert.strictEqual(xml, expectedXml);
        done();
    });
});
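A hedged sketch of the RetainUntilDate rules the table encodes: the timestamp must be ISO-8601 (UTC/GMT date strings are rejected) and must lie in the future. The regex is a simplified subset of ISO-8601 matching what Date.prototype.toISOString emits, not the middleware's actual check:

function validateRetainUntilDate(value) {
    const isoPattern = /^\d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2}(\.\d{1,3})?Z$/;
    if (!isoPattern.test(value)) {
        return new Error('RetainUntilDate timestamp must be ISO-8601 format');
    }
    if (new Date(value) <= new Date()) {
        return new Error('RetainUntilDate must be in the future');
    }
    return null;
}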
|
|
@ -1,21 +0,0 @@
|
|||
const assert = require('assert');
const { _validator } = require('../../../lib/s3middleware/tagging');

describe('tagging validator', () => {
    it('validates keys and values are less than 128 and 256', () => {
        const result = _validator.validateKeyValue('hey', 'you guys');
        assert.strictEqual(result, true);
    });
    it('returns error for keys greater than 128', () => {
        const result = _validator.validateKeyValue('Y'.repeat(200), 'you guys');
        assert(result instanceof Error);
    });
    it('returns error for values greater than 256', () => {
        const result = _validator.validateKeyValue('X', 'Z'.repeat(300));
        assert(result instanceof Error);
    });
    it('allows any utf8 string in keys and values', () => {
        const result = _validator.validateKeyValue('あいう', '😀😄');
        assert.strictEqual(result, true);
    });
});
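The size limits under test, restated as a standalone sketch: tag keys are capped at 128 characters and values at 256, with any UTF-8 content allowed. (JavaScript's .length counts UTF-16 code units, which is close enough for a sketch; this is not the middleware's actual validator.)

function validateKeyValueSketch(key, value) {
    if (key.length > 128) {
        return new Error('tag key must be at most 128 characters');
    }
    if (value.length > 256) {
        return new Error('tag value must be at most 256 characters');
    }
    return true;
}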
|
|
@ -3,28 +3,6 @@ const routesUtils = require('../../../../lib/s3routes/routesUtils.js');
|
|||

const bannedStr = 'banned';
const prefixBlacklist = [];
const validBucketNamesWithDotsAndHyphens = [
    'my-bucket',
    'my.bucket',
    'my-bucket-01',
    '01-my-bucket',
    'my.bucket.01',
    '01.my.bucket',
    'my.bucket-01',
    'my-bucket.01',
    'my--bucket--01',
    'my--bucket.01',
    'my.bucket--01',
];
const invalidBucketNamesWithDotsAndHyphens = [
    '-my-bucket',
    '.my.bucket',
    'my-bucket-',
    'my.bucket.',
    'my..bucket',
    'my-.bucket',
    'my.-bucket',
];

describe('routesUtils.isValidBucketName', () => {
    it('should return false if bucketname is fewer than ' +
|
@ -78,30 +56,4 @@ describe('routesUtils.isValidBucketName', () => {
|
|||
        const result = routesUtils.isValidBucketName('okay', prefixBlacklist);
        assert.strictEqual(result, true);
    });

    describe('should return true when bucket name has valid' +
    ' combination of dots and hyphens', () => {
        validBucketNamesWithDotsAndHyphens.forEach(bucketName => {
            it(`should return true if bucketname is '${bucketName}'`,
            () => {
                const result =
                    routesUtils.isValidBucketName(bucketName,
                        prefixBlacklist);
                assert.strictEqual(result, true);
            });
        });
    });

    describe('should return false when bucket name has invalid' +
    ' combination of dots and hyphens', () => {
        invalidBucketNamesWithDotsAndHyphens.forEach(bucketName => {
            it(`should return false if bucketname is '${bucketName}'`,
            () => {
                const result =
                    routesUtils.isValidBucketName(bucketName,
                        prefixBlacklist);
                assert.strictEqual(result, false);
            });
        });
    });
});
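One way to express the dot/hyphen rules implied by the two name lists above (a sketch, not routesUtils' actual implementation): names must start and end with a letter or digit, and the runs '..', '.-' and '-.' are forbidden, while '--' is fine.

function hasValidDotsAndHyphens(name) {
    return /^[a-z0-9]/.test(name)        // must start with a letter or digit
        && /[a-z0-9]$/.test(name)        // must end with a letter or digit
        && !/(\.\.|\.-|-\.)/.test(name); // no '..', '.-' or '-.' runs
}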
|
|
|
@ -1,56 +0,0 @@
|
|||
const assert = require('assert');
const stream = require('stream');
const joi = require('@hapi/joi');
const readJSONStreamObject = require('../../../lib/stream/readJSONStreamObject');

class ReqStream extends stream.Readable {
    constructor(contents) {
        super();
        this.contents = contents;
    }

    _read() {
        while (this.contents.length > 0) {
            this.push(this.contents.slice(0, 1000));
            this.contents = this.contents.slice(1000);
        }
        this.push(null);
    }
}

describe('readJSONStreamObject', () => {
    [{
        desc: 'accept a valid JSON object',
        contents: '{"foo":"bar","baz":42}',
        error: false,
        value: { foo: 'bar', baz: 42 },
    }, {
        desc: 'error with empty stream contents',
        contents: '',
        error: true,
    }, {
        desc: 'error if stream contents do not match the validation schema',
        contents: '"foo"',
        joiSchema: joi.object(),
        error: true,
    }, {
        desc: 'accept a large JSON payload',
        contents: `[${new Array(10000).fill('"some-payload"').join(',')}]`,
        error: false,
        value: new Array(10000).fill('some-payload'),
    }].forEach(testCase => {
        it(`should ${testCase.desc}`, async () => {
            let value;
            try {
                value = await readJSONStreamObject(
                    new ReqStream(testCase.contents), testCase.joiSchema);
            } catch (err) {
                assert.strictEqual(testCase.error, true);
                return undefined;
            }
            assert.strictEqual(testCase.error, false);
            assert.deepStrictEqual(testCase.value, value);
            return undefined;
        });
    });
});
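A hedged sketch of what the helper is expected to do, judging only from these cases: buffer the whole stream, JSON.parse it, and reject when parsing fails or an optional joi schema does not validate. This approximates the contract, not the module's actual source.

async function readJSONStreamObjectSketch(readable, joiSchema) {
    const chunks = [];
    for await (const chunk of readable) {
        // normalize string chunks to Buffers before concatenation
        chunks.push(typeof chunk === 'string' ? Buffer.from(chunk) : chunk);
    }
    const value = JSON.parse(Buffer.concat(chunks).toString());
    if (joiSchema) {
        const { error } = joiSchema.validate(value);
        if (error) {
            throw error;
        }
    }
    return value;
}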
|
|
@ -1,81 +0,0 @@
|
|||
{
    "port": 8000,
    "listenOn": [],
    "replicationGroupId": "RG001",
    "restEndpoints": {
        "localhost": "us-east-1",
        "127.0.0.1": "us-east-1",
        "cloudserver-front": "us-east-1",
        "s3.docker.test": "us-east-1",
        "127.0.0.2": "us-east-1",
        "s3.amazonaws.com": "us-east-1"
    },
    "websiteEndpoints": ["s3-website-us-east-1.amazonaws.com",
        "s3-website.us-east-2.amazonaws.com",
        "s3-website-us-west-1.amazonaws.com",
        "s3-website-us-west-2.amazonaws.com",
        "s3-website.ap-south-1.amazonaws.com",
        "s3-website.ap-northeast-2.amazonaws.com",
        "s3-website-ap-southeast-1.amazonaws.com",
        "s3-website-ap-southeast-2.amazonaws.com",
        "s3-website-ap-northeast-1.amazonaws.com",
        "s3-website.eu-central-1.amazonaws.com",
        "s3-website-eu-west-1.amazonaws.com",
        "s3-website-sa-east-1.amazonaws.com",
        "s3-website.localhost",
        "s3-website.scality.test"],
    "replicationEndpoints": [{
        "site": "zenko",
        "servers": ["127.0.0.1:8000"],
        "default": true
    }, {
        "site": "us-east-2",
        "type": "aws_s3"
    }],
    "cdmi": {
        "host": "localhost",
        "port": 81,
        "path": "/dewpoint",
        "readonly": true
    },
    "bucketd": {
        "bootstrap": ["localhost:9000"]
    },
    "vaultd": {
        "host": "localhost",
        "port": 8500
    },
    "clusters": 10,
    "log": {
        "logLevel": "info",
        "dumpLevel": "error"
    },
    "healthChecks": {
        "allowFrom": ["127.0.0.1/8", "::1"]
    },
    "metadataClient": {
        "host": "localhost",
        "port": 9990
    },
    "dataClient": {
        "host": "localhost",
        "port": 9991
    },
    "metadataDaemon": {
        "bindAddress": "localhost",
        "port": 9990
    },
    "dataDaemon": {
        "bindAddress": "localhost",
        "port": 9991
    },
    "recordLog": {
        "enabled": false,
        "recordLogName": "s3-recordlog"
    },
    "mongodb": {
        "host": "localhost",
        "port": 27018,
        "database": "metadata"
    }
}
|
|
@ -1,86 +0,0 @@
|
|||
{
    "port": 8000,
    "listenOn": [],
    "replicationGroupId": "RG001",
    "restEndpoints": {
        "localhost": "us-east-1",
        "127.0.0.1": "us-east-1",
        "cloudserver-front": "us-east-1",
        "s3.docker.test": "us-east-1",
        "127.0.0.2": "us-east-1",
        "s3.amazonaws.com": "us-east-1"
    },
    "websiteEndpoints": ["s3-website-us-east-1.amazonaws.com",
        "s3-website.us-east-2.amazonaws.com",
        "s3-website-us-west-1.amazonaws.com",
        "s3-website-us-west-2.amazonaws.com",
        "s3-website.ap-south-1.amazonaws.com",
        "s3-website.ap-northeast-2.amazonaws.com",
        "s3-website-ap-southeast-1.amazonaws.com",
        "s3-website-ap-southeast-2.amazonaws.com",
        "s3-website-ap-northeast-1.amazonaws.com",
        "s3-website.eu-central-1.amazonaws.com",
        "s3-website-eu-west-1.amazonaws.com",
        "s3-website-sa-east-1.amazonaws.com",
        "s3-website.localhost",
        "s3-website.scality.test"],
    "replicationEndpoints": [{
        "site": "zenko",
        "servers": ["127.0.0.1:8000"],
        "default": true
    }, {
        "site": "us-east-2",
        "type": "aws_s3"
    }],
    "cdmi": {
        "host": "localhost",
        "port": 81,
        "path": "/dewpoint",
        "readonly": true
    },
    "bucketd": {
        "bootstrap": ["localhost:9000"]
    },
    "vaultd": {
        "host": "localhost",
        "port": 8500
    },
    "clusters": 10,
    "log": {
        "logLevel": "info",
        "dumpLevel": "error"
    },
    "healthChecks": {
        "allowFrom": ["127.0.0.1/8", "::1"]
    },
    "metadataClient": {
        "host": "localhost",
        "port": 9990
    },
    "dataClient": {
        "host": "localhost",
        "port": 9991
    },
    "metadataDaemon": {
        "bindAddress": "localhost",
        "port": 9990
    },
    "dataDaemon": {
        "bindAddress": "localhost",
        "port": 9991
    },
    "recordLog": {
        "enabled": false,
        "recordLogName": "s3-recordlog"
    },
    "mongodb": {
        "host": "localhost",
        "port": 27018,
        "database": "metadata"
    },
    "requests": {
        "viaProxy": true,
        "trustedProxyCIDRs": ["192.168.100.0/22"],
        "extractClientIPFromHeader": "x-forwarded-for"
    }
}
|
|
@ -1,9 +1,5 @@
|
|||
const assert = require('assert');

module.exports = function performListing(data, Extension, params, logger, vFormat) {
    const listing = new Extension(params, logger, vFormat);
    const mdParams = listing.genMDParams();
    assert.strictEqual(typeof mdParams, 'object');
module.exports = function performListing(data, Extension, params, logger) {
    const listing = new Extension(params, logger);
    data.every(e => listing.filter(e) >= 0);
    return listing.result();
};
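Hypothetical usage of the updated helper, with illustrative names for the extension class, entry list and logger: the vFormat argument and the genMDParams assertion are gone, so callers now pass only the data, the listing extension, its params, and a logger.

const performListing = require('./performListing');
// Delimiter, entries and logger are placeholders for whatever the
// caller already has in scope
const result = performListing(entries, Delimiter, { delimiter: '/' }, logger);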
|
|
224
yarn.lock
224
yarn.lock
|
@ -39,42 +39,6 @@
|
|||
  dependencies:
    "@hapi/hoek" "8.x.x"

"@sinonjs/commons@^1", "@sinonjs/commons@^1.6.0", "@sinonjs/commons@^1.7.0", "@sinonjs/commons@^1.7.2":
  version "1.7.2"
  resolved "https://registry.yarnpkg.com/@sinonjs/commons/-/commons-1.7.2.tgz#505f55c74e0272b43f6c52d81946bed7058fc0e2"
  integrity sha512-+DUO6pnp3udV/v2VfUWgaY5BIE1IfT7lLfeDzPVeMT1XKkaAp9LgSI9x5RtrFQoZ9Oi0PgXQQHPaoKu7dCjVxw==
  dependencies:
    type-detect "4.0.8"

"@sinonjs/fake-timers@^6.0.0", "@sinonjs/fake-timers@^6.0.1":
  version "6.0.1"
  resolved "https://registry.yarnpkg.com/@sinonjs/fake-timers/-/fake-timers-6.0.1.tgz#293674fccb3262ac782c7aadfdeca86b10c75c40"
  integrity sha512-MZPUxrmFubI36XS1DI3qmI0YdN1gks62JtFZvxR67ljjSNCeK6U08Zx4msEWOXuofgqUt6zPHSi1H9fbjR/NRA==
  dependencies:
    "@sinonjs/commons" "^1.7.0"

"@sinonjs/formatio@^5.0.1":
  version "5.0.1"
  resolved "https://registry.yarnpkg.com/@sinonjs/formatio/-/formatio-5.0.1.tgz#f13e713cb3313b1ab965901b01b0828ea6b77089"
  integrity sha512-KaiQ5pBf1MpS09MuA0kp6KBQt2JUOQycqVG1NZXvzeaXe5LGFqAKueIS0bw4w0P9r7KuBSVdUk5QjXsUdu2CxQ==
  dependencies:
    "@sinonjs/commons" "^1"
    "@sinonjs/samsam" "^5.0.2"

"@sinonjs/samsam@^5.0.2", "@sinonjs/samsam@^5.0.3":
  version "5.0.3"
  resolved "https://registry.yarnpkg.com/@sinonjs/samsam/-/samsam-5.0.3.tgz#86f21bdb3d52480faf0892a480c9906aa5a52938"
  integrity sha512-QucHkc2uMJ0pFGjJUDP3F9dq5dx8QIaqISl9QgwLOh6P9yv877uONPGXh/OH/0zmM3tW1JjuJltAZV2l7zU+uQ==
  dependencies:
    "@sinonjs/commons" "^1.6.0"
    lodash.get "^4.4.2"
    type-detect "^4.0.8"

"@sinonjs/text-encoding@^0.7.1":
  version "0.7.1"
  resolved "https://registry.yarnpkg.com/@sinonjs/text-encoding/-/text-encoding-0.7.1.tgz#8da5c6530915653f3a1f38fd5f101d8c3f8079c5"
  integrity sha512-+iTbntw2IZPb/anVDbypzfQa+ay64MW0Zo8aJ8gZPWMMK6/OubMVb6lUPMagqjOPnmtauXnFCACVl3O7ogjeqQ==

JSONStream@^1.0.0:
  version "1.3.5"
  resolved "https://registry.yarnpkg.com/JSONStream/-/JSONStream-1.3.5.tgz#3208c1f08d3a4d99261ab64f92302bc15e111ca0"
|
@ -119,9 +83,9 @@ acorn@^3.0.4:
|
|||
  integrity sha1-ReN/s56No/JbruP/U2niu18iAXo=

acorn@^5.5.0:
  version "5.7.4"
  resolved "https://registry.yarnpkg.com/acorn/-/acorn-5.7.4.tgz#3e8d8a9947d0599a1796d10225d7432f4a4acf5e"
  integrity sha512-1D++VG7BhrtvQpNbBzovKNc1FLGGEE/oGe7b9xJm/RFHMBeUaUGpluV9RLjZa47YFdPcDAenEYuq9pQPcMdLJg==
  version "5.7.3"
  resolved "https://registry.yarnpkg.com/acorn/-/acorn-5.7.3.tgz#67aa231bf8812974b85235a96771eb6bd07ea279"
  integrity sha512-T/zvzYRfbVojPWahDsE5evJdHb3oJoQfFbsrKM7w5Zcs++Tr257tia3BmMP8XYVjp1S9RZXQMh7gao96BlqZOw==

after@0.8.2:
  version "0.8.2"
|
@ -384,7 +348,7 @@ core-util-is@~1.0.0:
|
|||
resolved "https://registry.yarnpkg.com/core-util-is/-/core-util-is-1.0.2.tgz#b5fd54220aa2bc5ab57aab7140c940754503c1a7"
|
||||
integrity sha1-tf1UIgqivFq1eqtxQMlAdUUDwac=
|
||||
|
||||
d@1, d@^1.0.1:
|
||||
d@1:
|
||||
version "1.0.1"
|
||||
resolved "https://registry.yarnpkg.com/d/-/d-1.0.1.tgz#8698095372d58dbee346ffd0c7093f99f8f9eb5a"
|
||||
integrity sha512-m62ShEObQ39CfralilEQRjH6oAMtNCV1xJyEx5LpRYUVN+EviphDgUc/F3hnYbADmkiNs67Y+3ylmlG7Lnu+FA==
|
||||
|
@ -450,11 +414,6 @@ diff@1.4.0:
|
|||
resolved "https://registry.yarnpkg.com/diff/-/diff-1.4.0.tgz#7f28d2eb9ee7b15a97efd89ce63dcfdaa3ccbabf"
|
||||
integrity sha1-fyjS657nsVqX79ic5j3P2qPMur8=
|
||||
|
||||
diff@^4.0.2:
|
||||
version "4.0.2"
|
||||
resolved "https://registry.yarnpkg.com/diff/-/diff-4.0.2.tgz#60f3aecb89d5fae520c11aa19efc2bb982aade7d"
|
||||
integrity sha512-58lmxKSA4BNyLz+HHMUzlOEpg09FV+ev6ZMe3vJihgdxzgcwZ8VoEEPmALCZG9LmqfVoNMMKpttIYTVG6uDY7A==
|
||||
|
||||
diskusage@^1.1.1:
|
||||
version "1.1.3"
|
||||
resolved "https://registry.yarnpkg.com/diskusage/-/diskusage-1.1.3.tgz#680d7dbf1b679168a195c9240eb3552cbd2c067b"
|
||||
|
@ -536,13 +495,13 @@ errno@~0.1.1:
|
|||
    prr "~1.0.1"

es5-ext@^0.10.35, es5-ext@^0.10.46, es5-ext@^0.10.50, es5-ext@~0.10.14:
  version "0.10.53"
  resolved "https://registry.yarnpkg.com/es5-ext/-/es5-ext-0.10.53.tgz#93c5a3acfdbef275220ad72644ad02ee18368de1"
  integrity sha512-Xs2Stw6NiNHWypzRTY1MtaG/uJlwCk8kH81920ma8mvN8Xq1gsfhZvpkImLQArw8AHnv8MT2I45J3c0R8slE+Q==
  version "0.10.50"
  resolved "https://registry.yarnpkg.com/es5-ext/-/es5-ext-0.10.50.tgz#6d0e23a0abdb27018e5ac4fd09b412bc5517a778"
  integrity sha512-KMzZTPBkeQV/JcSQhI5/z6d9VWJ3EnQ194USTUwIYZ2ZbpN8+SGXQKt1h68EX44+qt+Fzr8DO17vnxrw7c3agw==
  dependencies:
    es6-iterator "~2.0.3"
    es6-symbol "~3.1.3"
    next-tick "~1.0.0"
    es6-symbol "~3.1.1"
    next-tick "^1.0.0"

es6-iterator@^2.0.3, es6-iterator@~2.0.1, es6-iterator@~2.0.3:
  version "2.0.3"
|
@ -581,7 +540,7 @@ es6-set@~0.1.5:
|
|||
es6-symbol "3.1.1"
|
||||
event-emitter "~0.3.5"
|
||||
|
||||
es6-symbol@3.1.1:
|
||||
es6-symbol@3.1.1, es6-symbol@^3.1.1, es6-symbol@~3.1.1:
|
||||
version "3.1.1"
|
||||
resolved "https://registry.yarnpkg.com/es6-symbol/-/es6-symbol-3.1.1.tgz#bf00ef4fdab6ba1b46ecb7b629b4c7ed5715cc77"
|
||||
integrity sha1-vwDvT9q2uhtG7Le2KbTH7VcVzHc=
|
||||
|
@ -589,14 +548,6 @@ es6-symbol@3.1.1:
|
|||
d "1"
|
||||
es5-ext "~0.10.14"
|
||||
|
||||
es6-symbol@^3.1.1, es6-symbol@~3.1.1, es6-symbol@~3.1.3:
|
||||
version "3.1.3"
|
||||
resolved "https://registry.yarnpkg.com/es6-symbol/-/es6-symbol-3.1.3.tgz#bad5d3c1bcdac28269f4cb331e431c78ac705d18"
|
||||
integrity sha512-NJ6Yn3FuDinBaBRWl/q5X/s4koRHBrgKAu+yGI6JCBeiu3qrcbJhwT2GeR/EXVfylRk8dpQVJoLEFhK+Mu31NA==
|
||||
dependencies:
|
||||
d "^1.0.1"
|
||||
ext "^1.1.2"
|
||||
|
||||
es6-weak-map@^2.0.1:
|
||||
version "2.0.3"
|
||||
resolved "https://registry.yarnpkg.com/es6-weak-map/-/es6-weak-map-2.0.3.tgz#b6da1f16cc2cc0d9be43e6bdbfc5e7dfcdf31d53"
|
||||
|
@ -632,9 +583,9 @@ eslint-config-airbnb@6.2.0:
|
|||
resolved "https://registry.yarnpkg.com/eslint-config-airbnb/-/eslint-config-airbnb-6.2.0.tgz#4a28196aa4617de01b8c914e992a82e5d0886a6e"
|
||||
integrity sha1-SigZaqRhfeAbjJFOmSqC5dCIam4=
|
||||
|
||||
eslint-config-scality@scality/Guidelines#ec33dfb:
|
||||
eslint-config-scality@scality/Guidelines#71a059ad:
|
||||
version "1.1.0"
|
||||
resolved "https://codeload.github.com/scality/Guidelines/tar.gz/ec33dfba6137749574f80dde6193b307536a86cc"
|
||||
resolved "https://codeload.github.com/scality/Guidelines/tar.gz/71a059ad3fa0598d5bbb923badda58ccf06cc8a6"
|
||||
dependencies:
|
||||
commander "1.3.2"
|
||||
markdownlint "0.0.8"
|
||||
|
@ -703,16 +654,11 @@ esrecurse@^4.1.0:
|
|||
  dependencies:
    estraverse "^4.1.0"

estraverse@^4.1.0, estraverse@^4.1.1:
estraverse@^4.1.0, estraverse@^4.1.1, estraverse@^4.2.0:
  version "4.2.0"
  resolved "https://registry.yarnpkg.com/estraverse/-/estraverse-4.2.0.tgz#0dee3fed31fcd469618ce7342099fc1afa0bdb13"
  integrity sha1-De4/7TH81GlhjOc0IJn8GvoL2xM=

estraverse@^4.2.0:
  version "4.3.0"
  resolved "https://registry.yarnpkg.com/estraverse/-/estraverse-4.3.0.tgz#398ad3f3c5a24948be7725e83d11a7de28cdbd1d"
  integrity sha512-39nnKffWz8xN1BU/2c79n9nB9HDzo0niYUqx6xyqUnyoAnQyyWpOTdZEeiCch8BBu515t4wp9ZmgVfVhn9EBpw==

esutils@^2.0.2:
  version "2.0.2"
  resolved "https://registry.yarnpkg.com/esutils/-/esutils-2.0.2.tgz#0abf4f1caa5bcb1f7a9d8acc6dea4faaa04bac9b"
|
@ -731,14 +677,7 @@ exit-hook@^1.0.0:
|
|||
resolved "https://registry.yarnpkg.com/exit-hook/-/exit-hook-1.1.1.tgz#f05ca233b48c05d54fff07765df8507e95c02ff8"
|
||||
integrity sha1-8FyiM7SMBdVP/wd2XfhQfpXAL/g=
|
||||
|
||||
ext@^1.1.2:
|
||||
version "1.4.0"
|
||||
resolved "https://registry.yarnpkg.com/ext/-/ext-1.4.0.tgz#89ae7a07158f79d35517882904324077e4379244"
|
||||
integrity sha512-Key5NIsUxdqKg3vIsdw9dSuXpPCQ297y6wBjL30edxwPgt2E44WcWBZey/ZvUc6sERLTxKdyCu4gZFmUbk1Q7A==
|
||||
dependencies:
|
||||
type "^2.0.0"
|
||||
|
||||
fast-levenshtein@~2.0.6:
|
||||
fast-levenshtein@~2.0.4:
|
||||
version "2.0.6"
|
||||
resolved "https://registry.yarnpkg.com/fast-levenshtein/-/fast-levenshtein-2.0.6.tgz#3d8a5c66883a16a30ca8643e851f19baa7797917"
|
||||
integrity sha1-PYpcZog6FqMMqGQ+hR8Zuqd5eRc=
|
||||
|
@ -801,19 +740,7 @@ glob@3.2.11, glob@~3.2.8:
|
|||
    inherits "2"
    minimatch "0.3"

glob@^7.0.3:
  version "7.1.6"
  resolved "https://registry.yarnpkg.com/glob/-/glob-7.1.6.tgz#141f33b81a7c2492e125594307480c46679278a6"
  integrity sha512-LwaxwyZ72Lk7vZINtNNrywX0ZuLyStrdDtabefZKAY5ZGJhVtgdznluResxNmPitE0SAO+O26sWTHeKSI2wMBA==
  dependencies:
    fs.realpath "^1.0.0"
    inflight "^1.0.4"
    inherits "2"
    minimatch "^3.0.4"
    once "^1.3.0"
    path-is-absolute "^1.0.0"

glob@^7.1.3:
glob@^7.0.3, glob@^7.1.3:
  version "7.1.4"
  resolved "https://registry.yarnpkg.com/glob/-/glob-7.1.4.tgz#aa608a2f6c577ad357e1ae5a5c26d9a8d1969255"
  integrity sha512-hkLPepehmnKk41pUGm3sYxoFs/umurYfYJCerbXEyFIWcAzvpipAgVkBqqT9RBKMGjnq6kMuyYwha6csxbiM1A==
|
@ -831,9 +758,9 @@ globals@^9.2.0:
|
|||
  integrity sha512-S0nG3CLEQiY/ILxqtztTWH/3iRRdyBLw6KMDxnKMchrtbj2OFmehVh0WUCfW3DUrIgx/qFrJPICrq4Z4sTR9UQ==

graceful-fs@^4.1.2:
  version "4.2.4"
  resolved "https://registry.yarnpkg.com/graceful-fs/-/graceful-fs-4.2.4.tgz#2256bde14d3632958c465ebc96dc467ca07a29fb"
  integrity sha512-WjKPNJF79dtJAVniUlGGWHYGz2jWxT6VhN/4m1NdkbZ2nOsEF+cI1Edgql5zCRhs/VsQYRvrXctxktVXZUkixw==
  version "4.2.0"
  resolved "https://registry.yarnpkg.com/graceful-fs/-/graceful-fs-4.2.0.tgz#8d8fdc73977cb04104721cb53666c1ca64cd328b"
  integrity sha512-jpSvDPV4Cq/bgtpndIWbI5hmYxhQGHPC4d4cqBPb4DLniCfhJokdXhwhaDuLBGLQdvvRum/UiX6ECVIPvDXqdg==

growl@1.9.2:
  version "1.9.2"
|
@ -859,11 +786,6 @@ has-cors@1.1.0:
|
|||
resolved "https://registry.yarnpkg.com/has-cors/-/has-cors-1.1.0.tgz#5e474793f7ea9843d1bb99c23eef49ff126fff39"
|
||||
integrity sha1-XkdHk/fqmEPRu5nCPu9J/xJv/zk=
|
||||
|
||||
has-flag@^4.0.0:
|
||||
version "4.0.0"
|
||||
resolved "https://registry.yarnpkg.com/has-flag/-/has-flag-4.0.0.tgz#944771fd9c81c81265c4d6941860da06bb59479b"
|
||||
integrity sha512-EykJT/Q1KjTWctppgIAgfSO0tKVuZUjhgMr17kqTumMl6Afv3EISleU7qZUzoXDFTAHTDC4NOoG/ZxU3EvlMPQ==
|
||||
|
||||
ignore@^3.1.2:
|
||||
version "3.3.10"
|
||||
resolved "https://registry.yarnpkg.com/ignore/-/ignore-3.3.10.tgz#0a97fb876986e8081c631160f8f9f389157f0043"
|
||||
|
@ -1006,9 +928,9 @@ jade@0.26.3:
|
|||
mkdirp "0.3.0"
|
||||
|
||||
js-yaml@^3.5.1:
|
||||
version "3.14.0"
|
||||
resolved "https://registry.yarnpkg.com/js-yaml/-/js-yaml-3.14.0.tgz#a7a34170f26a21bb162424d8adacb4113a69e482"
|
||||
integrity sha512-/4IbIeHcD9VMHFqDR/gQ7EdZdLimOvW2DdcxFjdyyZ9NsbS+ccrXqVWDtab/lRl5AlUqmpBx8EhPaWR+OtY17A==
|
||||
version "3.13.1"
|
||||
resolved "https://registry.yarnpkg.com/js-yaml/-/js-yaml-3.13.1.tgz#aff151b30bfdfa8e49e05da22e7415e9dfa37847"
|
||||
integrity sha512-YfbcO7jXDdyj0DGxYVSlSeQNHbD7XPWvrVWeVUujrQEoZzWJIRrCPoyk6kL6IAjAG2IolMK4T0hNUe0HOUs5Jw==
|
||||
dependencies:
|
||||
argparse "^1.0.7"
|
||||
esprima "^4.0.0"
|
||||
|
@ -1040,11 +962,6 @@ jsonpointer@^4.0.0:
|
|||
resolved "https://registry.yarnpkg.com/jsonpointer/-/jsonpointer-4.0.1.tgz#4fd92cb34e0e9db3c89c8622ecf51f9b978c6cb9"
|
||||
integrity sha1-T9kss04OnbPInIYi7PUfm5eMbLk=
|
||||
|
||||
just-extend@^4.0.2:
|
||||
version "4.1.0"
|
||||
resolved "https://registry.yarnpkg.com/just-extend/-/just-extend-4.1.0.tgz#7278a4027d889601640ee0ce0e5a00b992467da4"
|
||||
integrity sha512-ApcjaOdVTJ7y4r08xI5wIqpvwS48Q0PBG4DJROcEkH1f8MdAiNFyFxz3xoL0LWAVwjrwPYZdVHHxhRHcx/uGLA==
|
||||
|
||||
keypress@0.1.x:
|
||||
version "0.1.0"
|
||||
resolved "https://registry.yarnpkg.com/keypress/-/keypress-0.1.0.tgz#4a3188d4291b66b4f65edb99f806aa9ae293592a"
|
||||
|
@ -1183,11 +1100,6 @@ lodash.flatten@^4.4.0:
|
|||
resolved "https://registry.yarnpkg.com/lodash.flatten/-/lodash.flatten-4.4.0.tgz#f31c22225a9632d2bbf8e4addbef240aa765a61f"
|
||||
integrity sha1-8xwiIlqWMtK7+OSt2+8kCqdlph8=
|
||||
|
||||
lodash.get@^4.4.2:
|
||||
version "4.4.2"
|
||||
resolved "https://registry.yarnpkg.com/lodash.get/-/lodash.get-4.4.2.tgz#2d177f652fa31e939b4438d5341499dfa3825e99"
|
||||
integrity sha1-LRd/ZS+jHpObRDjVNBSZ36OCXpk=
|
||||
|
||||
lodash@^4.0.0, lodash@^4.14.0, lodash@^4.3.0:
|
||||
version "4.17.15"
|
||||
resolved "https://registry.yarnpkg.com/lodash/-/lodash-4.17.15.tgz#b447f6670a0455bbfeedd11392eff330ea097548"
|
||||
|
@@ -1291,30 +1203,18 @@ minimist@0.0.8:
   resolved "https://registry.yarnpkg.com/minimist/-/minimist-0.0.8.tgz#857fcabfc3397d2625b8228262e86aa7a011b05d"
   integrity sha1-hX/Kv8M5fSYluCKCYuhqp6ARsF0=

-minimist@^1.2.5:
-  version "1.2.5"
-  resolved "https://registry.yarnpkg.com/minimist/-/minimist-1.2.5.tgz#67d66014b66a6a8aaa0c083c5fd58df4e4e97602"
-  integrity sha512-FM9nNUYrRBAELZQT3xeZQ7fmMOBg6nWNmJKTcgsJeaLstP/UODVpGsr5OhXhhXg6f+qtJ8uiZ+PUxkDWcgIXLw==
-
 mkdirp@0.3.0:
   version "0.3.0"
   resolved "https://registry.yarnpkg.com/mkdirp/-/mkdirp-0.3.0.tgz#1bbf5ab1ba827af23575143490426455f481fe1e"
   integrity sha1-G79asbqCevI1dRQ0kEJkVfSB/h4=

-mkdirp@0.5.1, mkdirp@^0.5.1:
+mkdirp@0.5.1, mkdirp@^0.5.0, mkdirp@^0.5.1:
   version "0.5.1"
   resolved "https://registry.yarnpkg.com/mkdirp/-/mkdirp-0.5.1.tgz#30057438eac6cf7f8c4767f38648d6697d75c903"
   integrity sha1-MAV0OOrGz3+MR2fzhkjWaX11yQM=
   dependencies:
     minimist "0.0.8"

-mkdirp@^0.5.0:
-  version "0.5.5"
-  resolved "https://registry.yarnpkg.com/mkdirp/-/mkdirp-0.5.5.tgz#d91cefd62d1436ca0f41620e251288d420099def"
-  integrity sha512-NKmAlESf6jMGym1++R0Ra7wvhV+wFW63FaSOFPwRahvea0gMUcGUhVeAg/0BC0wiv9ih5NYPB1Wn1UEI1/L+xQ==
-  dependencies:
-    minimist "^1.2.5"
-
 mocha@2.5.3:
   version "2.5.3"
   resolved "https://registry.yarnpkg.com/mocha/-/mocha-2.5.3.tgz#161be5bdeb496771eb9b35745050b622b5aefc58"
@@ -1371,22 +1271,11 @@ negotiator@0.6.1:
   resolved "https://registry.yarnpkg.com/negotiator/-/negotiator-0.6.1.tgz#2b327184e8992101177b28563fb5e7102acd0ca9"
   integrity sha1-KzJxhOiZIQEXeyhWP7XnECrNDKk=

-next-tick@~1.0.0:
+next-tick@^1.0.0:
   version "1.0.0"
   resolved "https://registry.yarnpkg.com/next-tick/-/next-tick-1.0.0.tgz#ca86d1fe8828169b0120208e3dc8424b9db8342c"
   integrity sha1-yobR/ogoFpsBICCOPchCS524NCw=

-nise@^4.0.1:
-  version "4.0.3"
-  resolved "https://registry.yarnpkg.com/nise/-/nise-4.0.3.tgz#9f79ff02fa002ed5ffbc538ad58518fa011dc913"
-  integrity sha512-EGlhjm7/4KvmmE6B/UFsKh7eHykRl9VH+au8dduHLCyWUO/hr7+N+WtTvDUwc9zHuM1IaIJs/0lQ6Ag1jDkQSg==
-  dependencies:
-    "@sinonjs/commons" "^1.7.0"
-    "@sinonjs/fake-timers" "^6.0.0"
-    "@sinonjs/text-encoding" "^0.7.1"
-    just-extend "^4.0.2"
-    path-to-regexp "^1.7.0"
-
 node-forge@^0.7.1:
   version "0.7.6"
   resolved "https://registry.yarnpkg.com/node-forge/-/node-forge-0.7.6.tgz#fdf3b418aee1f94f0ef642cd63486c77ca9724ac"
@@ -1435,16 +1324,16 @@ opencollective-postinstall@^2.0.0:
   integrity sha512-pVOEP16TrAO2/fjej1IdOyupJY8KDUM1CvsaScRbw6oddvpQoOfGk4ywha0HKKVAD6RkW4x6Q+tNBwhf3Bgpuw==

 optionator@^0.8.1:
-  version "0.8.3"
-  resolved "https://registry.yarnpkg.com/optionator/-/optionator-0.8.3.tgz#84fa1d036fe9d3c7e21d99884b601167ec8fb495"
-  integrity sha512-+IW9pACdk3XWmmTXG8m3upGUJst5XRGzxMRjXzAuJ1XnIFNvfhjjIuYkDvysnPQ7qzqVzLt78BCruntqRhWQbA==
+  version "0.8.2"
+  resolved "https://registry.yarnpkg.com/optionator/-/optionator-0.8.2.tgz#364c5e409d3f4d6301d6c0b4c05bba50180aeb64"
+  integrity sha1-NkxeQJ0/TWMB1sC0wFu6UBgK62Q=
   dependencies:
     deep-is "~0.1.3"
-    fast-levenshtein "~2.0.6"
+    fast-levenshtein "~2.0.4"
     levn "~0.3.0"
     prelude-ls "~1.1.2"
     type-check "~0.3.2"
-    word-wrap "~1.2.3"
+    wordwrap "~1.0.0"

 options@>=0.0.5:
   version "0.0.6"
@@ -1492,13 +1381,6 @@ path-is-inside@^1.0.1:
   resolved "https://registry.yarnpkg.com/path-is-inside/-/path-is-inside-1.0.2.tgz#365417dede44430d1c11af61027facf074bdfc53"
   integrity sha1-NlQX3t5EQw0cEa9hAn+s8HS9/FM=

-path-to-regexp@^1.7.0:
-  version "1.8.0"
-  resolved "https://registry.yarnpkg.com/path-to-regexp/-/path-to-regexp-1.8.0.tgz#887b3ba9d84393e87a0a0b9f4cb756198b53548a"
-  integrity sha512-n43JRhlUKUAlibEJhPeir1ncUID16QnEjNpwzNdO3Lm4ywrBpBZ5oLD0I6br9evr1Y9JTqwRtAh7JLoOzAQdVA==
-  dependencies:
-    isarray "0.0.1"
-
 pluralize@^1.2.1:
   version "1.2.1"
   resolved "https://registry.yarnpkg.com/pluralize/-/pluralize-1.2.1.tgz#d1a21483fd22bb41e58a12fa3421823140897c45"
@@ -1578,9 +1460,9 @@ pull-window@^2.1.4:
     looper "^2.0.0"

 readable-stream@^2.2.2:
-  version "2.3.7"
-  resolved "https://registry.yarnpkg.com/readable-stream/-/readable-stream-2.3.7.tgz#1eca1cf711aef814c04f62252a36a62f6cb23b57"
-  integrity sha512-Ebho8K4jIbHAxnuxi7o42OrZgF/ZTNcsZj6nRKyUmkhLFq8CHItp/fy6hQZuZmP/n3yZ9VBUbp4zz/mX8hmYPw==
+  version "2.3.6"
+  resolved "https://registry.yarnpkg.com/readable-stream/-/readable-stream-2.3.6.tgz#b11c27d88b8ff1fbe070643cf94b0c79ae1b0aaf"
+  integrity sha512-tQtKA9WIAhBF3+VLAseyMqZeBjW0AHJoxOtYqSUZNJxauErmLbVm2FW1y+J/YA9dUrAC39ITejlZWhVIwawkKw==
   dependencies:
     core-util-is "~1.0.0"
     inherits "~2.0.3"
@@ -1719,19 +1601,6 @@ simple-glob@^0.1:
     lodash "~2.4.1"
     minimatch "~0.2.14"

-sinon@^9.0.2:
-  version "9.0.2"
-  resolved "https://registry.yarnpkg.com/sinon/-/sinon-9.0.2.tgz#b9017e24633f4b1c98dfb6e784a5f0509f5fd85d"
-  integrity sha512-0uF8Q/QHkizNUmbK3LRFqx5cpTttEVXudywY9Uwzy8bTfZUhljZ7ARzSxnRHWYWtVTeh4Cw+tTb3iU21FQVO9A==
-  dependencies:
-    "@sinonjs/commons" "^1.7.2"
-    "@sinonjs/fake-timers" "^6.0.1"
-    "@sinonjs/formatio" "^5.0.1"
-    "@sinonjs/samsam" "^5.0.3"
-    diff "^4.0.2"
-    nise "^4.0.1"
-    supports-color "^7.1.0"
-
 slice-ansi@0.0.4:
   version "0.0.4"
   resolved "https://registry.yarnpkg.com/slice-ansi/-/slice-ansi-0.0.4.tgz#edbf8903f66f7ce2f8eafd6ceed65e264c831b35"
@@ -1868,13 +1737,6 @@ supports-color@^2.0.0:
   resolved "https://registry.yarnpkg.com/supports-color/-/supports-color-2.0.0.tgz#535d045ce6b6363fa40117084629995e9df324c7"
   integrity sha1-U10EXOa2Nj+kARcIRimZXp3zJMc=

-supports-color@^7.1.0:
-  version "7.1.0"
-  resolved "https://registry.yarnpkg.com/supports-color/-/supports-color-7.1.0.tgz#68e32591df73e25ad1c4b49108a2ec507962bfd1"
-  integrity sha512-oRSIpR8pxT1Wr2FquTNnGet79b3BWljqOuoW/h4oBhxJ/HUbX5nX6JSruTkvXDCFMwDPvsaTTbvMLKZWSy0R5g==
-  dependencies:
-    has-flag "^4.0.0"
-
 table@^3.7.8:
   version "3.8.3"
   resolved "https://registry.yarnpkg.com/table/-/table-3.8.3.tgz#2bbc542f0fda9861a755d3947fefd8b3f513855f"
@@ -1922,20 +1784,10 @@ type-check@~0.3.2:
   dependencies:
     prelude-ls "~1.1.2"

-type-detect@4.0.8, type-detect@^4.0.8:
-  version "4.0.8"
-  resolved "https://registry.yarnpkg.com/type-detect/-/type-detect-4.0.8.tgz#7646fb5f18871cfbb7749e69bd39a6388eb7450c"
-  integrity sha512-0fr/mIH1dlO+x7TlcMy+bIDqKPsw/70tVyeHW787goQjhmqaZe10uwLujubK9q9Lg6Fiho1KUKDYz0Z7k7g5/g==
-
 type@^1.0.1:
-  version "1.2.0"
-  resolved "https://registry.yarnpkg.com/type/-/type-1.2.0.tgz#848dd7698dafa3e54a6c479e759c4bc3f18847a0"
-  integrity sha512-+5nt5AAniqsCnu2cEQQdpzCAh33kVx8n0VoFidKpB1dVVLAN/F+bgVOqOJqOnEnrhp222clB5p3vUlD+1QAnfg==
-
-type@^2.0.0:
-  version "2.0.0"
-  resolved "https://registry.yarnpkg.com/type/-/type-2.0.0.tgz#5f16ff6ef2eb44f260494dae271033b29c09a9c3"
-  integrity sha512-KBt58xCHry4Cejnc2ISQAF7QY+ORngsWfxezO68+12hKV6lQY8P/psIkcbjeHWn7MqcgciWJyCCevFMJdIXpow==
+  version "1.0.1"
+  resolved "https://registry.yarnpkg.com/type/-/type-1.0.1.tgz#084c9a17fcc9151a2cdb1459905c2e45e4bb7d61"
+  integrity sha512-MAM5dBMJCJNKs9E7JXo4CXRAansRfG0nlJxW7Wf6GZzSOvH31zClSaHdIMWLehe/EGMBkqeC55rrkaOr5Oo7Nw==

 typedarray-to-buffer@~3.1.5:
   version "3.1.5"
@@ -2004,10 +1856,10 @@ werelogs@scality/werelogs#0ff7ec82:
   dependencies:
     safe-json-stringify "1.0.3"

-word-wrap@~1.2.3:
-  version "1.2.3"
-  resolved "https://registry.yarnpkg.com/word-wrap/-/word-wrap-1.2.3.tgz#610636f6b1f703891bd34771ccb17fb93b47079c"
-  integrity sha512-Hz/mrNwitNRh/HUAtM/VT/5VH+ygD6DV7mYKZAtHOrbs8U7lvPS6xf7EJKMF0uW1KJCl0H701g3ZGus+muE5vQ==
+wordwrap@~1.0.0:
+  version "1.0.0"
+  resolved "https://registry.yarnpkg.com/wordwrap/-/wordwrap-1.0.0.tgz#27584810891456a4171c8d0226441ade90cbcaeb"
+  integrity sha1-J1hIEIkUVqQXHI0CJkQa3pDLyus=

 wrappy@1:
   version "1.0.2"