Compare commits
9 Commits
developmen... ... feature/li...
Author | SHA1 | Date |
---|---|---|
Nicolas Humbert | b11e9fdfc7 | |
Nicolas Humbert | e565ff2efc | |
Nicolas Humbert | d7f8a20e0c | |
Nicolas Humbert | 1a897ec3b7 | |
Nicolas Humbert | a56eb26d97 | |
Nicolas Humbert | 219e128a93 | |
Nicolas Humbert | 494f297bd4 | |
Nicolas Humbert | 11f618f8a1 | |
Nicolas Humbert | 454c61f971 | |
index.ts

@@ -43,6 +43,9 @@ export const algorithms = {
         DelimiterVersions: require('./lib/algos/list/delimiterVersions').DelimiterVersions,
         DelimiterMaster: require('./lib/algos/list/delimiterMaster').DelimiterMaster,
         MPU: require('./lib/algos/list/MPU').MultipartUploads,
+        DelimiterCurrent: require('./lib/algos/list/delimiterCurrent').DelimiterCurrent,
+        DelimiterNonCurrent: require('./lib/algos/list/delimiterNonCurrent').DelimiterNonCurrent,
+        DelimiterOrphanDeleteMarker: require('./lib/algos/list/delimiterOrphanDeleteMarker').DelimiterOrphanDeleteMarker,
     },
     listTools: {
         DelimiterTools: require('./lib/algos/list/tools'),
lib/algos/list/delimiterCurrent

@@ -1,4 +1,5 @@
 const { Delimiter } = require('./delimiter');
+const { FILTER_ACCEPT, FILTER_END } = require('./tools');
 
 type ResultObject = {
     Contents: {
@@ -9,6 +10,8 @@ type ResultObject = {
     NextMarker ?: string;
 };
 
+const DELIMITER_TIMEOUT_MS = 10 * 1000; // 10s
+
 /**
  * Handle object listing with parameters. This extends the base class Delimiter
  * to return the master/current versions.
@@ -25,6 +28,9 @@ class DelimiterCurrent extends Delimiter {
         super(parameters, logger, vFormat);
 
         this.beforeDate = parameters.beforeDate;
+        // used for monitoring
+        this.start = null;
+        this.evaluatedKeys = 0;
     }
 
     genMDParamsV1() {
@@ -35,9 +41,52 @@ class DelimiterCurrent extends Delimiter {
                 lt: this.beforeDate,
             };
         }
 
+        this.start = Date.now();
+
         return params;
     }
 
+    _parse(s) {
+        let p;
+        try {
+            p = JSON.parse(s);
+        } catch (e: any) {
+            this.logger.warn(
+                'Could not parse Object Metadata while listing',
+                { err: e.toString() });
+        }
+        return p;
+    }
+
+    filter(obj) {
+        if (this.start && Date.now() - this.start > DELIMITER_TIMEOUT_MS) {
+            this.IsTruncated = true;
+            this.logger.info('listing stopped after expected internal timeout',
+                {
+                    timeoutMs: DELIMITER_TIMEOUT_MS,
+                    evaluatedKeys: this.evaluatedKeys,
+                });
+            return FILTER_END;
+        }
+        ++this.evaluatedKeys;
+
+        const parsedValue = this._parse(obj.value);
+        // if parsing fails, skip the key.
+        if (parsedValue) {
+            const lastModified = parsedValue['last-modified'];
+            // We then check if the current version is older than the "beforeDate"
+            if (!this.beforeDate || (lastModified && lastModified < this.beforeDate)) {
+                return super.filter(obj);
+            }
+            // In the event of a timeout occurring before any content is added, make sure to update NextMarker.
+            const key = this.getObjectKey(obj);
+            this.NextMarker = key;
+        }
+
+        return FILTER_ACCEPT;
+    }
+
     result(): ResultObject {
         const result: ResultObject = {
             Contents: this.Contents,
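For orientation (editor's sketch, not part of the diff): the extended DelimiterCurrent is driven like any other listing extension, by feeding raw { key, value } metadata entries through filter() until it signals the end, then reading the accumulated result. The entries variable below is hypothetical, and the exact result fields come from the Delimiter base class plus the result() shown above.

// Minimal sketch, assuming `extension` is a DelimiterCurrent instance and
// `entries` is an iterable of { key, value } metadata records.
const { FILTER_END } = require('./tools');

function drainListing(extension, entries) {
    for (const entry of entries) {
        // filter() returns FILTER_END on the internal timeout; the base class
        // can also end the iteration once enough keys have been collected.
        if (extension.filter(entry) === FILTER_END) {
            break;
        }
    }
    return extension.result();
}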
lib/algos/list/delimiterNonCurrent (new file)

@@ -0,0 +1,275 @@
'use strict'; // eslint-disable-line strict

const Delimiter = require('./delimiter').Delimiter;
const VSConst = require('../../versioning/constants').VersioningConstants;
const { inc, FILTER_ACCEPT, FILTER_END, SKIP_NONE, FILTER_SKIP } = require('./tools');
const VID_SEP = VSConst.VersionId.Separator;
const Version = require('../../versioning/Version').Version;
const { DbPrefixes } = VSConst;

// TODO: find an acceptable timeout value.
const DELIMITER_TIMEOUT_MS = 10 * 1000; // 10s
const TRIM_METADATA_MIN_BLOB_SIZE = 10000;

/**
 * Handle object listing with parameters. This extends the base class Delimiter
 * to return the raw non-current versions objects.
 */
class DelimiterNonCurrent extends Delimiter {
    /**
     * Delimiter listing of non-current versions.
     * @param {Object} parameters - listing parameters
     * @param {String} parameters.versionIdMarker - version id marker
     * @param {String} parameters.beforeDate - limit the response to keys with stale date older than beforeDate
     * "stale date" is the date when a version becomes non-current.
     * @param {String} parameters.keyMarker - key marker
     * @param {RequestLogger} logger - The logger of the request
     * @param {String} [vFormat] - versioning key format
     */
    constructor(parameters, logger, vFormat) {
        super(parameters, logger, vFormat);

        this.versionIdMarker = parameters.versionIdMarker;
        this.beforeDate = parameters.beforeDate;
        this.keyMarker = parameters.keyMarker;
        this.nextContinueMarker = 'keyMarker';
        this.NextKeyMarker = null;

        this.skipping = this.skippingV1;
        this.genMDParams = this.genMDParamsV1;

        // internal state
        this.staleDate = null;
        this.masterKey = undefined;
        this.masterVersionId = undefined;

        // used for monitoring
        this.evaluatedKeys = 0;
    }

    skippingV1() {
        return SKIP_NONE;
    }

    compareObjects(masterObj, versionObj) {
        const masterKey = masterObj.key.slice(DbPrefixes.Master.length);
        const versionKey = versionObj.key.slice(DbPrefixes.Version.length);
        return masterKey < versionKey ? -1 : 1;
    }

    genMDParamsV1() {
        const vParams = {
            gte: DbPrefixes.Version,
            lt: inc(DbPrefixes.Version),
        };

        const mParams = {
            gte: DbPrefixes.Master,
            lt: inc(DbPrefixes.Master),
        };

        if (this.prefix) {
            const masterWithPrefix = `${DbPrefixes.Master}${this.prefix}`;
            mParams.gte = masterWithPrefix;
            mParams.lt = inc(masterWithPrefix);

            const versionWithPrefix = `${DbPrefixes.Version}${this.prefix}`;
            vParams.gte = versionWithPrefix;
            vParams.lt = inc(versionWithPrefix);
        }

        if (this.keyMarker && `${DbPrefixes.Version}${this.keyMarker}` >= vParams.gte) {
            if (this.versionIdMarker) {
                const keyMarkerWithVersionId = `${this.keyMarker}${VID_SEP}${this.versionIdMarker}`;
                // versionIdMarker should always come with keyMarker but may not be the other way around.
                // NOTE: "gte" (instead of "gt") is used to include the last version of the "previous"
                // truncated listing when a versionId marker is specified.
                // This "previous"/"already evaluated" version will be used to retrieve the stale date and
                // skipped to not evaluate the same key twice in the addContents() method.
                vParams.gte = `${DbPrefixes.Version}${keyMarkerWithVersionId}`;
                mParams.gte = `${DbPrefixes.Master}${keyMarkerWithVersionId}`;
            } else {
                delete vParams.gte;
                delete mParams.gte;
                vParams.gt = DbPrefixes.Version + inc(this.keyMarker + VID_SEP);
                mParams.gt = DbPrefixes.Master + inc(this.keyMarker + VID_SEP);
            }
        }

        this.start = Date.now();

        return [mParams, vParams];
    }

    getLastModified(value) {
        let lastModified;
        try {
            const v = JSON.parse(value);
            lastModified = v['last-modified'];
        } catch (e) {
            this.logger.warn('could not parse Object Metadata while listing',
                {
                    method: 'getLastModified',
                    err: e.toString(),
                });
        }
        return lastModified;
    }

    parseKey(fullKey) {
        const versionIdIndex = fullKey.indexOf(VID_SEP);
        if (versionIdIndex === -1) {
            return { key: fullKey };
        }
        const nonversionedKey = fullKey.slice(0, versionIdIndex);
        const versionId = fullKey.slice(versionIdIndex + 1);
        return { key: nonversionedKey, versionId };
    }

    /**
     * Filter to apply on each iteration
     * @param {Object} obj - The key and value of the element
     * @param {String} obj.key - The key of the element
     * @param {String} obj.value - The value of the element
     * @return {number} - indicates if iteration should continue
     */
    filter(obj) {
        const value = obj.value;
        // NOTE: this check on PHD is only useful for Artesca, S3C
        // does not use PHDs in V1 format
        if (Version.isPHD(value)) {
            return FILTER_ACCEPT;
        }
        return super.filter(obj);
    }

    /**
     * NOTE: Each version of a specific key is sorted from the latest to the oldest
     * thanks to the way version ids are generated.
     * DESCRIPTION: For a given key, the latest version is skipped since it represents the current version or
     * the last version of the previous truncated listing.
     * The current last-modified date is kept in memory and used as a "stale date" for the following version.
     * The following version is pushed only if the "stale date" (picked up from the previous version)
     * is available (JSON.parse has not failed), and if the "beforeDate" argument is not specified or
     * the "stale date" is older than the "beforeDate".
     * The in-memory "stale date" is then updated with the version's last-modified date to be used for
     * the following version.
     * The process stops and returns the available results if either:
     * - no more metadata key is left to be processed
     * - the listing reaches the maximum number of keys to be returned
     * - the internal timeout is reached
     * @param {String} keyVersionSuffix - The key to add
     * @param {String} value - The value of the key
     * @return {number} - indicates if iteration should continue
     */
    addContents(keyVersionSuffix, value) {
        const { key, versionId } = this.parseKey(keyVersionSuffix);

        if (key === this.keyMarker) {
            // since the nonversioned key equals the marker, there is
            // necessarily a versionId in this key
            if (versionId < this.versionIdMarker) {
                // skip all versions until marker
                return FILTER_SKIP;
            }
        }

        if (this._reachedMaxKeys()) {
            return FILTER_END;
        }

        if (this.start && Date.now() - this.start > DELIMITER_TIMEOUT_MS) {
            this.IsTruncated = true;
            this.logger.info('listing stopped after expected internal timeout',
                {
                    timeoutMs: DELIMITER_TIMEOUT_MS,
                    evaluatedKeys: this.evaluatedKeys,
                });
            return FILTER_END;
        }
        ++this.evaluatedKeys;

        this.NextKeyMarker = key;
        this.NextVersionIdMarker = versionId;

        // The master key serves two purposes:
        // - It retrieves the expiration date for the previous version that is no longer current.
        // - It excludes the current version from the list.
        const isMasterKey = versionId === undefined;
        if (isMasterKey) {
            this.masterKey = key;
            this.masterVersionId = Version.from(value).getVersionId() || 'null';

            this.staleDate = this.getLastModified(value);
            return FILTER_ACCEPT;
        }

        const isCurrentVersion = this.masterKey === key && this.masterVersionId === versionId;
        if (isCurrentVersion) {
            // filter out the master version
            return FILTER_ACCEPT;
        }

        // The following version is pushed only:
        // - if the "stale date" (picked up from the previous version) is available (JSON.parse has not failed),
        // - if "beforeDate" is not specified or if specified and the "stale date" is older.
        // - if "excludedDataStoreName" is not specified or if specified and the data store name is different
        let lastModified;
        if (this.staleDate && (!this.beforeDate || this.staleDate < this.beforeDate)) {
            const v = this.trimMetadataAddStaleDate(value, this.staleDate);
            lastModified = v.lastModified;
            const { contentValue } = v;
            // check that trimMetadataAddStaleDate succeeds to only push objects with a defined staleDate.
            if (contentValue) {
                this.Contents.push({ key, value: contentValue });
                ++this.keys;
            }
        }

        // The in-memory "stale date" is then updated with the version's last-modified date to be used for
        // the following version.
        this.staleDate = lastModified || this.getLastModified(value);

        return FILTER_ACCEPT;
    }

    trimMetadataAddStaleDate(value, staleDate) {
        let ret = undefined;
        let lastModified = undefined;
        try {
            ret = JSON.parse(value);
            ret.staleDate = staleDate;
            lastModified = ret['last-modified'];
            if (value.length >= TRIM_METADATA_MIN_BLOB_SIZE) {
                delete ret.location;
            }
            ret = JSON.stringify(ret);
        } catch (e) {
            // Prefer returning an unfiltered data rather than
            // stopping the service in case of parsing failure.
            // The risk of this approach is a potential
            // reproduction of MD-692, where too much memory is
            // used by repd.
            this.logger.warn('could not parse Object Metadata while listing',
                {
                    method: 'trimMetadataAddStaleDate',
                    err: e.toString(),
                });
        }
        return { contentValue: ret, lastModified };
    }

    result() {
        const result = {
            Contents: this.Contents,
            IsTruncated: this.IsTruncated,
        };

        if (this.IsTruncated) {
            result.NextKeyMarker = this.NextKeyMarker;
            result.NextVersionIdMarker = this.NextVersionIdMarker;
        }

        return result;
    }
}

module.exports = { DelimiterNonCurrent };
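The heart of DelimiterNonCurrent is the stale-date chaining described in the addContents() comment above: versions arrive newest first, the newest one per key is withheld as the current version, and each following version inherits the previous version's last-modified date as its stale date. A standalone sketch of that bookkeeping, simplified to plain objects (editor's illustration, not part of the diff):

// versions: [{ key, lastModified }] iterated newest-first per key.
// Returns the non-current entries, each annotated with its staleDate.
function chainStaleDates(versions, beforeDate) {
    const out = [];
    let currentKey = null;
    let staleDate = null;
    for (const v of versions) {
        if (v.key !== currentKey) {
            // Newest version of a new key: it is the current version and only
            // donates its last-modified date to the next (non-current) version.
            currentKey = v.key;
            staleDate = v.lastModified;
            continue;
        }
        if (staleDate && (!beforeDate || staleDate < beforeDate)) {
            out.push({ key: v.key, lastModified: v.lastModified, staleDate });
        }
        staleDate = v.lastModified;
    }
    return out;
}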
lib/algos/list/delimiterOrphanDeleteMarker (new file)

@@ -0,0 +1,202 @@
'use strict'; // eslint-disable-line strict

const Delimiter = require('./delimiter').Delimiter;
const VSConst = require('../../versioning/constants').VersioningConstants;
const { inc, FILTER_ACCEPT, FILTER_END, SKIP_NONE } = require('./tools');
const VID_SEP = VSConst.VersionId.Separator;
const { DbPrefixes } = VSConst;

const DELIMITER_TIMEOUT_MS = 10 * 1000; // 10s
const TRIM_METADATA_MIN_BLOB_SIZE = 10000;

/**
 * Handle object listing with parameters. This extends the base class Delimiter
 * to return the orphan delete markers. Orphan delete markers are also
 * referred to as expired object delete markers.
 * They are delete markers with zero noncurrent versions.
 */
class DelimiterOrphanDeleteMarker extends Delimiter {
    /**
     * Delimiter listing of orphan delete markers.
     * @param {Object} parameters - listing parameters
     * @param {String} parameters.beforeDate - limit the response to keys older than beforeDate
     * @param {RequestLogger} logger - The logger of the request
     * @param {String} [vFormat] - versioning key format
     */
    constructor(parameters, logger, vFormat) {
        super(parameters, logger, vFormat);

        this.beforeDate = parameters.beforeDate;

        this.skipping = this.skippingV1;
        this.genMDParams = this.genMDParamsV1;

        this.keyName = null;
        this.staleDate = null;
        this.value = null;

        // used for monitoring
        this.evaluatedKeys = 0;
    }

    skippingV1() {
        return SKIP_NONE;
    }

    _reachedMaxKeys() {
        if (this.keys >= this.maxKeys) {
            return true;
        }
        return false;
    }

    genMDParamsV1() {
        const params = {
            gte: DbPrefixes.Version,
            lt: inc(DbPrefixes.Version),
        };

        if (this.prefix) {
            params.gte = `${DbPrefixes.Version}${this.prefix}`;
            params.lt = `${DbPrefixes.Version}${inc(this.prefix)}`;
        }

        if (this.marker && `${DbPrefixes.Version}${this.marker}` >= params.gte) {
            delete params.gte;
            params.gt = DbPrefixes.Version
                + inc(this.marker + VID_SEP);
        }

        this.start = Date.now();

        return params;
    }

    _addOrphan() {
        const parsedValue = this._parse(this.value);
        // if parsing fails, skip the key.
        if (parsedValue) {
            const lastModified = parsedValue['last-modified'];
            const isDeleteMarker = parsedValue.isDeleteMarker;
            // We then check if the orphan version is a delete marker and if it is older than the "beforeDate"
            if ((!this.beforeDate || (lastModified && lastModified < this.beforeDate)) && isDeleteMarker) {
                // Prefer returning an untrimmed data rather than stopping the service in case of parsing failure.
                const s = this._trimAndStringify(parsedValue) || this.value;
                this.Contents.push({ key: this.keyName, value: s });
                this.NextMarker = this.keyName;
                ++this.keys;
            }
        }
    }

    _parse(s) {
        let p;
        try {
            p = JSON.parse(s);
        } catch (e) {
            this.logger.warn(
                'Could not parse Object Metadata while listing',
                { err: e.toString() });
        }
        return p;
    }

    _trimAndStringify(value) {
        const p = value;
        let s = undefined;
        try {
            if (p.length >= TRIM_METADATA_MIN_BLOB_SIZE) {
                delete p.location;
            }
            s = JSON.stringify(p);
        } catch (e) {
            this.logger.warn('could not trim and stringify Object Metadata while listing',
                {
                    method: 'trimMetadataAddStaleDate',
                    err: e.toString(),
                });
        }
        return s;
    }

    /**
     * NOTE: Each version of a specific key is sorted from the latest to the oldest
     * thanks to the way version ids are generated.
     * DESCRIPTION: For a given key, the latest version is kept in memory since it is the current version.
     * If the following version references a new key, it means that the previous one was an orphan version.
     * We then check if the orphan version is a delete marker and if it is older than the "beforeDate"
     * The process stops and returns the available results if either:
     * - no more metadata key is left to be processed
     * - the listing reaches the maximum number of keys to be returned
     * - the internal timeout is reached
     * NOTE: we cannot leverage MongoDB to list keys older than "beforeDate"
     * because then we will not be able to assess its orphanage.
     * @param {String} keyVersionSuffix - The key with version id as a suffix.
     * @param {String} value - The value of the key
     * @return {number} - indicates if iteration should continue
     */
    addContents(keyVersionSuffix, value) {
        if (this._reachedMaxKeys()) {
            return FILTER_END;
        }

        if (this.start && Date.now() - this.start > DELIMITER_TIMEOUT_MS) {
            this.IsTruncated = true;
            this.NextMarker = this.keyName;

            this.logger.info('listing stopped after expected internal timeout',
                {
                    timeoutMs: DELIMITER_TIMEOUT_MS,
                    evaluatedKeys: this.evaluatedKeys,
                });
            return FILTER_END;
        }
        ++this.evaluatedKeys;

        const versionIdIndex = keyVersionSuffix.indexOf(VID_SEP);
        // key without version suffix
        const key = keyVersionSuffix.slice(0, versionIdIndex);

        // For a given key, the youngest version is kept in memory since it represents the current version.
        if (key !== this.keyName) {
            // If this.value is defined, it means that <this.keyName, this.value> pair is "allowed" to be an orphan.
            if (this.value) {
                this._addOrphan();
            }
            this.keyName = key;
            this.value = value;

            return FILTER_ACCEPT;
        }

        this.keyName = key;
        this.value = null;

        return FILTER_ACCEPT;
    }

    result() {
        // The following check makes sure the last orphan delete marker is not forgotten.
        if (this.keys < this.maxKeys) {
            if (this.value) {
                this._addOrphan();
            }
            // The following makes sure that if maxKeys is reached, IsTruncated is set to true.
            // We moved the "isTruncated" from _reachedMaxKeys to make sure we take into account the last entity
            // if listing is truncated right before the last entity and the last entity is an orphan delete marker.
        } else {
            this.IsTruncated = this.maxKeys > 0;
        }

        const result = {
            Contents: this.Contents,
            IsTruncated: this.IsTruncated,
        };

        if (this.IsTruncated) {
            result.NextMarker = this.NextMarker;
        }

        return result;
    }
}

module.exports = { DelimiterOrphanDeleteMarker };
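DelimiterOrphanDeleteMarker relies on the same newest-first ordering the other way around: the latest version of each key is held back, and if the next entry already belongs to a different key, the held entry had no non-current versions and is an orphan candidate; it is kept only if it is a delete marker older than beforeDate. A standalone sketch of that scan (editor's illustration, not part of the diff):

// versions: [{ key, value }] where value is parsed metadata
// ({ isDeleteMarker, 'last-modified' }), iterated newest-first per key.
function findOrphanDeleteMarkers(versions, beforeDate) {
    const orphans = [];
    let heldKey = null;
    let heldValue = null;
    const flushHeld = () => {
        if (heldValue && heldValue.isDeleteMarker
            && (!beforeDate || heldValue['last-modified'] < beforeDate)) {
            orphans.push({ key: heldKey, value: heldValue });
        }
    };
    for (const v of versions) {
        if (v.key !== heldKey) {
            flushHeld(); // the previous key had only its held (current) version
            heldKey = v.key;
            heldValue = v.value;
        } else {
            heldValue = null; // the key has non-current versions: not an orphan
        }
    }
    flushHeld(); // do not forget the last held key
    return orphans;
}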
lib/algos/list/index

@@ -7,4 +7,6 @@ module.exports = {
         .DelimiterMaster,
     MPU: require('./MPU').MultipartUploads,
     DelimiterCurrent: require('./delimiterCurrent').DelimiterCurrent,
+    DelimiterNonCurrent: require('./delimiterNonCurrent').DelimiterNonCurrent,
+    DelimiterOrphanDeleteMarker: require('./delimiterOrphanDeleteMarker').DelimiterOrphanDeleteMarker,
 };
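With these exports in place a caller can pick a listing algorithm by name, which is how the MongoClientInterface change further down instantiates it from params.listingType. A hedged usage sketch (the require path and surrounding function are assumptions, not part of the diff):

const listAlgos = require('./lib/algos/list'); // hypothetical path to the index above

// params.listingType is expected to be one of the exported names, e.g.
// 'DelimiterCurrent', 'DelimiterNonCurrent' or 'DelimiterOrphanDeleteMarker'.
function buildListingExtension(params, log, vFormat) {
    const Extension = listAlgos[params.listingType];
    return new Extension(params, log, vFormat);
}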
@@ -66,6 +66,7 @@ function _parseLifecycleListEntries(entries) {
             Size: tmp['content-length'],
             ETag: tmp['content-md5'],
             VersionId: tmp.versionId,
+            IsNull: tmp.isNull,
             LastModified: tmp['last-modified'],
             Owner: {
                 DisplayName: tmp['owner-display-name'],
BucketClientInterface

@@ -1,6 +1,5 @@
 const assert = require('assert');
 
-const errors = require('../../../errors').default;
 const BucketInfo = require('../../../models/BucketInfo').default;
 
 class BucketClientInterface {
@@ -112,7 +111,14 @@ class BucketClientInterface {
     }
 
     listLifecycleObject(bucketName, params, log, cb) {
-        return process.nextTick(cb, errors.NotImplemented);
+        this.client.listObject(bucketName, log.getSerializedUids(), params,
+            (err, data) => {
+                if (err) {
+                    return cb(err);
+                }
+                return cb(null, JSON.parse(data));
+            });
+        return null;
     }
 
     listMultipartUploads(bucketName, params, log, cb) {
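A minimal sketch of how the new listLifecycleObject() might be invoked (the bucket name, the processEntries helper and the log object are hypothetical; the parameter and callback shapes follow the implementation above):

bucketClient.listLifecycleObject('example-bucket', {
    listingType: 'DelimiterNonCurrent',
    beforeDate: '2023-01-01T00:00:00.000Z',
}, log, (err, data) => {
    if (err) {
        return log.error('lifecycle listing failed', { error: err });
    }
    // data is the JSON-parsed listing result, e.g. { Contents, IsTruncated, ... }
    return processEntries(data.Contents);
});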
MongoClientInterface

@@ -1633,10 +1633,11 @@ class MongoClientInterface {
         const extName = params.listingType;
 
         const extension = new listAlgos[extName](params, log, vFormat);
-        const mainStreamParams = extension.genMDParams();
+        const extensionParams = extension.genMDParams();
 
         const internalParams = {
-            mainStreamParams,
+            mainStreamParams: Array.isArray(extensionParams) ? extensionParams[0] : extensionParams,
+            secondaryStreamParams: Array.isArray(extensionParams) ? extensionParams[1] : null,
         };
 
         return this.internalListObject(bucketName, internalParams, extension, vFormat, log, cb);
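The Array.isArray() check exists because genMDParams() now has two possible shapes: most algorithms return a single range-query object, while DelimiterNonCurrent returns a [masterParams, versionParams] pair (see its genMDParamsV1() above), so the listing needs both a main and a secondary stream. A small helper equivalent to the inline ternaries (sketch only):

// Normalizes the extension output into { mainStreamParams, secondaryStreamParams }.
function toInternalParams(extensionParams) {
    if (Array.isArray(extensionParams)) {
        const [mainStreamParams, secondaryStreamParams] = extensionParams;
        return { mainStreamParams, secondaryStreamParams };
    }
    return { mainStreamParams: extensionParams, secondaryStreamParams: null };
}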
@@ -75,4 +75,28 @@ describe('MongoClientInterface::metadata.listLifecycleObject::global', () => {
             return done();
         });
     });
+
+    it('Should return error listing non-current versions if v0 key format', done => {
+        const params = {
+            listingType: 'DelimiterNonCurrent',
+        };
+        return metadata.listLifecycleObject(BUCKET_NAME, params, logger, (err, data) => {
+            assert(err.NotImplemented);
+            assert(!data);
+
+            return done();
+        });
+    });
+
+    it('Should return error listing orphan delete markers if v0 key format', done => {
+        const params = {
+            listingType: 'DelimiterOrphanDeleteMarker',
+        };
+        return metadata.listLifecycleObject(BUCKET_NAME, params, logger, (err, data) => {
+            assert(err.NotImplemented);
+            assert(!data);
+
+            return done();
+        });
+    });
 });
New test file (MongoClientInterface::metadata.listLifecycleObject::noncurrent; listing truncated below)

@@ -0,0 +1,827 @@
const async = require('async');
const assert = require('assert');
const werelogs = require('werelogs');
const { MongoMemoryReplSet } = require('mongodb-memory-server');
const logger = new werelogs.Logger('MongoClientInterface', 'debug', 'debug');
const MetadataWrapper =
    require('../../../../../lib/storage/metadata/MetadataWrapper');
const { versioning } = require('../../../../../index');
const { BucketVersioningKeyFormat } = versioning.VersioningConstants;
const { assertContents, makeBucketMD, putBulkObjectVersions, flagObjectForDeletion } = require('./utils');

const IMPL_NAME = 'mongodb';
const DB_NAME = 'metadata';
const BUCKET_NAME = 'test-lifecycle-list-non-current-bucket';

const mongoserver = new MongoMemoryReplSet({
    debug: false,
    instanceOpts: [
        { port: 27020 },
    ],
    replSet: {
        name: 'rs0',
        count: 1,
        DB_NAME,
        storageEngine: 'ephemeralForTest',
    },
});

describe('MongoClientInterface::metadata.listLifecycleObject::noncurrent', () => {
    let metadata;
    let collection;
    const expectedVersionIds = {};
    const key1 = 'pfx1-test-object';
    const key2 = 'pfx2-test-object';
    const key3 = 'pfx3-test-object';

    beforeAll(done => {
        mongoserver.waitUntilRunning().then(() => {
            const opts = {
                mongodb: {
                    replicaSetHosts: 'localhost:27020',
                    writeConcern: 'majority',
                    replicaSet: 'rs0',
                    readPreference: 'primary',
                    database: DB_NAME,
                },
            };
            metadata = new MetadataWrapper(IMPL_NAME, opts, null, logger);
            metadata.client.defaultBucketKeyFormat = BucketVersioningKeyFormat.v1;
            metadata.setup(done);
        });
    });

    afterAll(done => {
        async.series([
            next => metadata.close(next),
            next => mongoserver.stop()
                .then(() => next())
                .catch(next),
        ], done);
    });

    beforeEach(done => {
        const bucketMD = makeBucketMD(BUCKET_NAME);
        const versionParams = {
            versioning: true,
            versionId: null,
            repairMaster: null,
        };
        async.series([
            next => metadata.createBucket(BUCKET_NAME, bucketMD, logger, err => {
                if (err) {
                    return next(err);
                }

                collection = metadata.client.getCollection(BUCKET_NAME);
                return next();
            }),
            next => {
                const params = {
                    objName: key1,
                    objVal: {
                        key: key1,
                        versionId: 'null',
                    },
                    nbVersions: 5,
                };
                const timestamp = 0;
                putBulkObjectVersions(metadata, BUCKET_NAME, params.objName, params.objVal, versionParams,
                    params.nbVersions, timestamp, logger, (err, data) => {
                        expectedVersionIds[key1] = data;
                        return next(err);
                    });
                /* eslint-disable max-len */
                // { "_id" : "Mpfx1-test-object", "value" : { "key" : "pfx1-test-object", "versionId" : "vid4", "last-modified" : "1970-01-01T00:00:00.005Z" } }
                // { "_id" : "Vpfx1-test-object{sep}id4", "value" : { "key" : "pfx1-test-object", "versionId" : "vid4", "last-modified" : "1970-01-01T00:00:00.005Z" } }
                // { "_id" : "Vpfx1-test-object{sep}id3", "value" : { "key" : "pfx1-test-object", "versionId" : "vid3", "last-modified" : "1970-01-01T00:00:00.004Z" } }
                // { "_id" : "Vpfx1-test-object{sep}id2", "value" : { "key" : "pfx1-test-object", "versionId" : "vid2", "last-modified" : "1970-01-01T00:00:00.003Z" } }
                // { "_id" : "Vpfx1-test-object{sep}id1", "value" : { "key" : "pfx1-test-object", "versionId" : "vid1", "last-modified" : "1970-01-01T00:00:00.002Z" } }
                // { "_id" : "Vpfx1-test-object{sep}id0", "value" : { "key" : "pfx1-test-object", "versionId" : "vid0", "last-modified" : "1970-01-01T00:00:00.001Z" } }
                /* eslint-enable max-len */
            },
            next => {
                const params = {
                    objName: key2,
                    objVal: {
                        key: key2,
                        versionId: 'null',
                    },
                    nbVersions: 5,
                };
                const timestamp = 0;
                putBulkObjectVersions(metadata, BUCKET_NAME, params.objName, params.objVal, versionParams,
                    params.nbVersions, timestamp, logger, (err, data) => {
                        expectedVersionIds[key2] = data;
                        return next(err);
                    });
                /* eslint-disable max-len */
                // { "_id" : "Mpfx2-test-object", "value" : { "key" : "pfx2-test-object", "versionId" : "vid4", "last-modified" : "1970-01-01T00:00:00.005Z" } }
                // { "_id" : "Vpfx2-test-object{sep}id4", "value" : { "key" : "pfx2-test-object", "versionId" : "vid4", "last-modified" : "1970-01-01T00:00:00.005Z" } }
                // { "_id" : "Vpfx2-test-object{sep}id3", "value" : { "key" : "pfx2-test-object", "versionId" : "vid3", "last-modified" : "1970-01-01T00:00:00.004Z" } }
                // { "_id" : "Vpfx2-test-object{sep}id2", "value" : { "key" : "pfx2-test-object", "versionId" : "vid2", "last-modified" : "1970-01-01T00:00:00.003Z" } }
                // { "_id" : "Vpfx2-test-object{sep}id1", "value" : { "key" : "pfx2-test-object", "versionId" : "vid1", "last-modified" : "1970-01-01T00:00:00.002Z" } }
                // { "_id" : "Vpfx1-test-object{sep}id0", "value" : { "key" : "pfx2-test-object", "versionId" : "vid0", "last-modified" : "1970-01-01T00:00:00.001Z" } }
                /* eslint-enable max-len */
            },
            next => {
                const params = {
                    objName: key3,
                    objVal: {
                        key: key3,
                        versionId: 'null',
                    },
                    nbVersions: 5,
                };
                const timestamp = 0;
                putBulkObjectVersions(metadata, BUCKET_NAME, params.objName, params.objVal, versionParams,
                    params.nbVersions, timestamp, logger, (err, data) => {
                        expectedVersionIds[key3] = data;
                        return next(err);
                    });
                /* eslint-disable max-len */
                // { "_id" : "Mpfx3-test-object", "value" : { "key" : "pfx3-test-object", "versionId" : "vid4", "last-modified" : "1970-01-01T00:00:00.005Z" } }
                // { "_id" : "Vpfx3-test-object{sep}id4", "value" : { "key" : "pfx3-test-object", "versionId" : "vid4", "last-modified" : "1970-01-01T00:00:00.005Z" } }
                // { "_id" : "Vpfx3-test-object{sep}id3", "value" : { "key" : "pfx3-test-object", "versionId" : "vid3", "last-modified" : "1970-01-01T00:00:00.004Z" } }
                // { "_id" : "Vpfx3-test-object{sep}id2", "value" : { "key" : "pfx3-test-object", "versionId" : "vid2", "last-modified" : "1970-01-01T00:00:00.003Z" } }
                // { "_id" : "Vpfx3-test-object{sep}id1", "value" : { "key" : "pfx3-test-object", "versionId" : "vid1", "last-modified" : "1970-01-01T00:00:00.002Z" } }
                // { "_id" : "Vpfx3-test-object{sep}id0", "value" : { "key" : "pfx3-test-object", "versionId" : "vid0", "last-modified" : "1970-01-01T00:00:00.001Z" } }
                /* eslint-enable max-len */
            },
        ], done);
    });

    afterEach(done => metadata.deleteBucket(BUCKET_NAME, logger, done));

    it('Should list non-current versions', done => {
        const params = {
            listingType: 'DelimiterNonCurrent',
        };
        return metadata.listLifecycleObject(BUCKET_NAME, params, logger, (err, data) => {
            assert.deepStrictEqual(err, null);
            assert.strictEqual(data.IsTruncated, false);
            assert.strictEqual(data.Contents.length, 12);
            const expected = [
                {
                    key: key1,
                    LastModified: '1970-01-01T00:00:00.004Z',
                    staleDate: '1970-01-01T00:00:00.005Z',
                },
                {
                    key: key1,
                    LastModified: '1970-01-01T00:00:00.003Z',
                    staleDate: '1970-01-01T00:00:00.004Z',
                },
                {
                    key: key1,
                    LastModified: '1970-01-01T00:00:00.002Z',
                    staleDate: '1970-01-01T00:00:00.003Z',
                },
                {
                    key: key1,
                    LastModified: '1970-01-01T00:00:00.001Z',
                    staleDate: '1970-01-01T00:00:00.002Z',
                },
                {
                    key: key2,
                    LastModified: '1970-01-01T00:00:00.004Z',
                    staleDate: '1970-01-01T00:00:00.005Z',
                },
                {
                    key: key2,
                    LastModified: '1970-01-01T00:00:00.003Z',
                    staleDate: '1970-01-01T00:00:00.004Z',
                },
                {
                    key: key2,
                    LastModified: '1970-01-01T00:00:00.002Z',
                    staleDate: '1970-01-01T00:00:00.003Z',
                },
                {
                    key: key2,
                    LastModified: '1970-01-01T00:00:00.001Z',
                    staleDate: '1970-01-01T00:00:00.002Z',
                },
                {
                    key: key3,
                    LastModified: '1970-01-01T00:00:00.004Z',
                    staleDate: '1970-01-01T00:00:00.005Z',
                },
                {
                    key: key3,
                    LastModified: '1970-01-01T00:00:00.003Z',
                    staleDate: '1970-01-01T00:00:00.004Z',
                },
                {
                    key: key3,
                    LastModified: '1970-01-01T00:00:00.002Z',
                    staleDate: '1970-01-01T00:00:00.003Z',
                },
                {
                    key: key3,
                    LastModified: '1970-01-01T00:00:00.001Z',
                    staleDate: '1970-01-01T00:00:00.002Z',
                },
            ];
            assertContents(data.Contents, expected);

            const key1VersionIds = data.Contents.filter(k => k.key === key1).map(k => k.value.VersionId);
            assert.deepStrictEqual(key1VersionIds, expectedVersionIds[key1]);

            const key2VersionIds = data.Contents.filter(k => k.key === key2).map(k => k.value.VersionId);
            assert.deepStrictEqual(key2VersionIds, expectedVersionIds[key2]);

            const key3VersionIds = data.Contents.filter(k => k.key === key2).map(k => k.value.VersionId);
            assert.deepStrictEqual(key3VersionIds, expectedVersionIds[key2]);

            return done();
        });
    });

    it('Should return empty list when beforeDate is before the objects stale date', done => {
        const params = {
            listingType: 'DelimiterNonCurrent',
            beforeDate: '1970-01-01T00:00:00.000Z',
        };
        return metadata.listLifecycleObject(BUCKET_NAME, params, logger, (err, data) => {
            assert.deepStrictEqual(err, null);
            assert.strictEqual(data.IsTruncated, false);
            assert.strictEqual(data.Contents.length, 0);

            return done();
        });
    });

    it('Should return the non-current versions with stale date older than 1970-01-01T00:00:00.003Z', done => {
        const params = {
            listingType: 'DelimiterNonCurrent',
            beforeDate: '1970-01-01T00:00:00.003Z',
        };
        return metadata.listLifecycleObject(BUCKET_NAME, params, logger, (err, data) => {
            assert.deepStrictEqual(err, null);
            assert.strictEqual(data.IsTruncated, false);
            assert.strictEqual(data.Contents.length, 3);
            const expected = [
                {
                    key: key1,
                    LastModified: '1970-01-01T00:00:00.001Z',
                    staleDate: '1970-01-01T00:00:00.002Z',
                    VersionId: expectedVersionIds[key1][3],
                },
                {
                    key: key2,
                    LastModified: '1970-01-01T00:00:00.001Z',
                    staleDate: '1970-01-01T00:00:00.002Z',
                    VersionId: expectedVersionIds[key2][3],
                },
                {
                    key: key3,
                    LastModified: '1970-01-01T00:00:00.001Z',
                    staleDate: '1970-01-01T00:00:00.002Z',
                    VersionId: expectedVersionIds[key3][3],
                },
            ];
            assertContents(data.Contents, expected);

            return done();
        });
    });

    it('Should list non-current versions three by three', done => {
        const params = {
            listingType: 'DelimiterNonCurrent',
            maxKeys: 3,
        };

        return async.series([
            next => metadata.listLifecycleObject(BUCKET_NAME, params, logger, (err, data) => {
                assert.deepStrictEqual(err, null);
                assert.strictEqual(data.IsTruncated, true);
                assert.strictEqual(data.NextKeyMarker, key1);
                assert.strictEqual(data.NextVersionIdMarker, data.Contents[2].value.VersionId);
                assert.strictEqual(data.Contents.length, 3);
                const expected = [
                    {
                        key: key1,
                        LastModified: '1970-01-01T00:00:00.004Z',
                        staleDate: '1970-01-01T00:00:00.005Z',
                        VersionId: expectedVersionIds[key1][0],
                    },
                    {
                        key: key1,
                        LastModified: '1970-01-01T00:00:00.003Z',
                        staleDate: '1970-01-01T00:00:00.004Z',
                        VersionId: expectedVersionIds[key1][1],
                    },
                    {
                        key: key1,
                        LastModified: '1970-01-01T00:00:00.002Z',
                        staleDate: '1970-01-01T00:00:00.003Z',
                        VersionId: expectedVersionIds[key1][2],
                    },
                ];
                assertContents(data.Contents, expected);

                params.keyMarker = data.NextKeyMarker;
                params.versionIdMarker = data.NextVersionIdMarker;

                return next();
            }),
            next => metadata.listLifecycleObject(BUCKET_NAME, params, logger, (err, data) => {
                assert.deepStrictEqual(err, null);
                assert.strictEqual(data.IsTruncated, true);
                assert.strictEqual(data.NextKeyMarker, key2);
                assert.strictEqual(data.NextVersionIdMarker, data.Contents[2].value.VersionId);
                assert.strictEqual(data.Contents.length, 3);
                const expected = [
                    {
                        key: key1,
                        LastModified: '1970-01-01T00:00:00.001Z',
                        staleDate: '1970-01-01T00:00:00.002Z',
                        VersionId: expectedVersionIds[key1][3],
                    },
                    {
                        key: key2,
                        LastModified: '1970-01-01T00:00:00.004Z',
                        staleDate: '1970-01-01T00:00:00.005Z',
                        VersionId: expectedVersionIds[key2][0],
                    },
                    {
                        key: key2,
                        LastModified: '1970-01-01T00:00:00.003Z',
                        staleDate: '1970-01-01T00:00:00.004Z',
                        VersionId: expectedVersionIds[key2][1],
                    },
                ];
                assertContents(data.Contents, expected);

                params.keyMarker = data.NextKeyMarker;
                params.versionIdMarker = data.NextVersionIdMarker;

                return next();
            }),
            next => metadata.listLifecycleObject(BUCKET_NAME, params, logger, (err, data) => {
                assert.deepStrictEqual(err, null);
                assert.strictEqual(data.IsTruncated, true);
                assert.strictEqual(data.NextKeyMarker, key3);
                assert.strictEqual(data.NextVersionIdMarker, data.Contents[2].value.VersionId);
                assert.strictEqual(data.Contents.length, 3);
                const expected = [
                    {
                        key: key2,
                        LastModified: '1970-01-01T00:00:00.002Z',
                        staleDate: '1970-01-01T00:00:00.003Z',
                        VersionId: expectedVersionIds[key2][2],
                    },
                    {
                        key: key2,
                        LastModified: '1970-01-01T00:00:00.001Z',
                        staleDate: '1970-01-01T00:00:00.002Z',
                        VersionId: expectedVersionIds[key2][3],
                    },
                    {
                        key: key3,
                        LastModified: '1970-01-01T00:00:00.004Z',
                        staleDate: '1970-01-01T00:00:00.005Z',
                        VersionId: expectedVersionIds[key3][0],
                    },
                ];
                assertContents(data.Contents, expected);

                params.keyMarker = data.NextKeyMarker;
                params.versionIdMarker = data.NextVersionIdMarker;

                return next();
            }),
            next => metadata.listLifecycleObject(BUCKET_NAME, params, logger, (err, data) => {
                assert.deepStrictEqual(err, null);
                assert.strictEqual(data.IsTruncated, false);
                assert.strictEqual(data.Contents.length, 3);
                const expected = [
                    {
                        key: key3,
                        LastModified: '1970-01-01T00:00:00.003Z',
                        staleDate: '1970-01-01T00:00:00.004Z',
                        VersionId: expectedVersionIds[key3][1],
                    },
                    {
                        key: key3,
                        LastModified: '1970-01-01T00:00:00.002Z',
                        staleDate: '1970-01-01T00:00:00.003Z',
                        VersionId: expectedVersionIds[key3][2],
                    },
                    {
                        key: key3,
                        LastModified: '1970-01-01T00:00:00.001Z',
                        staleDate: '1970-01-01T00:00:00.002Z',
                        VersionId: expectedVersionIds[key3][3],
                    },
                ];
                assertContents(data.Contents, expected);

                return next();
            }),
        ], done);
    });

    it('Should list non-current versions four by four', done => {
        const params = {
            listingType: 'DelimiterNonCurrent',
            maxKeys: 4,
        };

        return async.series([
            next => metadata.listLifecycleObject(BUCKET_NAME, params, logger, (err, data) => {
                assert.deepStrictEqual(err, null);

                assert.strictEqual(data.IsTruncated, true);
                assert.strictEqual(data.NextKeyMarker, key1);
                assert.strictEqual(data.NextVersionIdMarker, data.Contents[3].value.VersionId);
                assert.strictEqual(data.Contents.length, 4);

                const expected = [
                    {
                        key: key1,
                        LastModified: '1970-01-01T00:00:00.004Z',
                        staleDate: '1970-01-01T00:00:00.005Z',
                    },
                    {
                        key: key1,
                        LastModified: '1970-01-01T00:00:00.003Z',
                        staleDate: '1970-01-01T00:00:00.004Z',
                    },
                    {
                        key: key1,
                        LastModified: '1970-01-01T00:00:00.002Z',
                        staleDate: '1970-01-01T00:00:00.003Z',
                    },
                    {
                        key: key1,
                        LastModified: '1970-01-01T00:00:00.001Z',
                        staleDate: '1970-01-01T00:00:00.002Z',
                    },
                ];

                assertContents(data.Contents, expected);

                const key1VersionIds = data.Contents.filter(k => k.key === key1).map(k => k.value.VersionId);
                assert.deepStrictEqual(key1VersionIds, expectedVersionIds[key1]);

                params.keyMarker = data.NextKeyMarker;
                params.versionIdMarker = data.NextVersionIdMarker;

                return next();
            }),
            next => metadata.listLifecycleObject(BUCKET_NAME, params, logger, (err, data) => {
                assert.deepStrictEqual(err, null);
                assert.strictEqual(data.IsTruncated, true);
                assert.strictEqual(data.NextKeyMarker, key2);
                assert.strictEqual(data.NextVersionIdMarker, data.Contents[3].value.VersionId);
                assert.strictEqual(data.Contents.length, 4);
                const expected = [
                    {
                        key: key2,
                        LastModified: '1970-01-01T00:00:00.004Z',
                        staleDate: '1970-01-01T00:00:00.005Z',
                    },
                    {
                        key: key2,
                        LastModified: '1970-01-01T00:00:00.003Z',
                        staleDate: '1970-01-01T00:00:00.004Z',
                    },
                    {
                        key: key2,
                        LastModified: '1970-01-01T00:00:00.002Z',
                        staleDate: '1970-01-01T00:00:00.003Z',
                    },
                    {
                        key: key2,
                        LastModified: '1970-01-01T00:00:00.001Z',
                        staleDate: '1970-01-01T00:00:00.002Z',
                    },
                ];
                assertContents(data.Contents, expected);

                const key2VersionIds = data.Contents.filter(k => k.key === key2).map(k => k.value.VersionId);
                assert.deepStrictEqual(key2VersionIds, expectedVersionIds[key2]);

                params.keyMarker = data.NextKeyMarker;
                params.versionIdMarker = data.NextVersionIdMarker;

                return next();
            }),
            next => metadata.listLifecycleObject(BUCKET_NAME, params, logger, (err, data) => {
                assert.deepStrictEqual(err, null);
                assert.strictEqual(data.IsTruncated, false);
                assert.strictEqual(data.Contents.length, 4);
                const expected = [
                    {
                        key: key3,
                        LastModified: '1970-01-01T00:00:00.004Z',
                        staleDate: '1970-01-01T00:00:00.005Z',
                    },
                    {
                        key: key3,
                        LastModified: '1970-01-01T00:00:00.003Z',
                        staleDate: '1970-01-01T00:00:00.004Z',
                    },
                    {
                        key: key3,
                        LastModified: '1970-01-01T00:00:00.002Z',
                        staleDate: '1970-01-01T00:00:00.003Z',
                    },
                    {
                        key: key3,
                        LastModified: '1970-01-01T00:00:00.001Z',
                        staleDate: '1970-01-01T00:00:00.002Z',
                    },
                ];
                assertContents(data.Contents, expected);

                const key3VersionIds = data.Contents.filter(k => k.key === key3).map(k => k.value.VersionId);
                assert.deepStrictEqual(key3VersionIds, expectedVersionIds[key3]);

                return next();
            }),
        ], done);
    });

    it('Should list non-current versions with a specific prefix two by two', done => {
        const params = {
            listingType: 'DelimiterNonCurrent',
            maxKeys: 2,
            prefix: 'pfx2',
        };

        return async.series([
            next => metadata.listLifecycleObject(BUCKET_NAME, params, logger, (err, data) => {
                assert.deepStrictEqual(err, null);

                assert.strictEqual(data.IsTruncated, true);
                assert.strictEqual(data.NextKeyMarker, key2);
                assert.strictEqual(data.NextVersionIdMarker, data.Contents[1].value.VersionId);
                assert.strictEqual(data.Contents.length, 2);

                const expected = [
                    {
                        key: key2,
                        LastModified: '1970-01-01T00:00:00.004Z',
                        staleDate: '1970-01-01T00:00:00.005Z',
                        VersionId: expectedVersionIds[key2][0],
                    },
                    {
                        key: key2,
                        LastModified: '1970-01-01T00:00:00.003Z',
                        staleDate: '1970-01-01T00:00:00.004Z',
                        VersionId: expectedVersionIds[key2][1],
                    },
                ];

                assertContents(data.Contents, expected);

                params.keyMarker = data.NextKeyMarker;
                params.versionIdMarker = data.NextVersionIdMarker;

                return next();
            }),
            next => metadata.listLifecycleObject(BUCKET_NAME, params, logger, (err, data) => {
                assert.deepStrictEqual(err, null);
                assert.strictEqual(data.IsTruncated, false);
                assert.strictEqual(data.Contents.length, 2);
                const expected = [
                    {
                        key: key2,
                        LastModified: '1970-01-01T00:00:00.002Z',
                        staleDate: '1970-01-01T00:00:00.003Z',
                        VersionId: expectedVersionIds[key2][2],
                    },
                    {
                        key: key2,
                        LastModified: '1970-01-01T00:00:00.001Z',
                        staleDate: '1970-01-01T00:00:00.002Z',
                        VersionId: expectedVersionIds[key2][3],
                    },
                ];
                assertContents(data.Contents, expected);

                params.keyMarker = data.NextKeyMarker;
                params.versionIdMarker = data.NextVersionIdMarker;

                return next();
            }),
        ], done);
    });

    it('Should return truncated list of non-current versions after pfx1-test-object key marker', done => {
        const params = {
            listingType: 'DelimiterNonCurrent',
            maxKeys: 4,
            keyMarker: key1,
        };

        return metadata.listLifecycleObject(BUCKET_NAME, params, logger, (err, data) => {
            assert.deepStrictEqual(err, null);
            assert.strictEqual(data.IsTruncated, true);
            assert.strictEqual(data.NextKeyMarker, key2);
            assert.strictEqual(data.NextVersionIdMarker, data.Contents[3].value.VersionId);
            assert.strictEqual(data.Contents.length, 4);
            const expected = [
                {
                    key: key2,
                    LastModified: '1970-01-01T00:00:00.004Z',
                    staleDate: '1970-01-01T00:00:00.005Z',
                },
                {
                    key: key2,
                    LastModified: '1970-01-01T00:00:00.003Z',
                    staleDate: '1970-01-01T00:00:00.004Z',
                },
                {
                    key: key2,
                    LastModified: '1970-01-01T00:00:00.002Z',
                    staleDate: '1970-01-01T00:00:00.003Z',
                },
                {
                    key: key2,
                    LastModified: '1970-01-01T00:00:00.001Z',
                    staleDate: '1970-01-01T00:00:00.002Z',
                },
            ];
            assertContents(data.Contents, expected);

            const key2VersionIds = data.Contents.filter(k => k.key === key2).map(k => k.value.VersionId);
            assert.deepStrictEqual(key2VersionIds, expectedVersionIds[key2]);

            return done();
        });
    });

    it('Should list non-current versions that start with prefix', done => {
        const params = {
            listingType: 'DelimiterNonCurrent',
            prefix: 'pfx2',
        };
        return metadata.listLifecycleObject(BUCKET_NAME, params, logger, (err, data) => {
            assert.deepStrictEqual(err, null);
            assert.strictEqual(data.IsTruncated, false);
            assert.strictEqual(data.Contents.length, 4);
            const expected = [{
                key: key2,
                LastModified: '1970-01-01T00:00:00.004Z',
                staleDate: '1970-01-01T00:00:00.005Z',
            },
            {
                key: key2,
                LastModified: '1970-01-01T00:00:00.003Z',
                staleDate: '1970-01-01T00:00:00.004Z',
            },
            {
                key: key2,
                LastModified: '1970-01-01T00:00:00.002Z',
                staleDate: '1970-01-01T00:00:00.003Z',
            },
            {
                key: key2,
                LastModified: '1970-01-01T00:00:00.001Z',
                staleDate: '1970-01-01T00:00:00.002Z',
            }];
            assertContents(data.Contents, expected);

            const key2VersionIds = data.Contents.filter(k => k.key === key2).map(k => k.value.VersionId);
            assert.deepStrictEqual(key2VersionIds, expectedVersionIds[key2]);

            return done();
        });
    });

    it('Should list non-current version that start with prefix and older than beforedate', done => {
        const params = {
            listingType: 'DelimiterNonCurrent',
            prefix: 'pfx2',
            maxKeys: 1,
            beforeDate: '1970-01-01T00:00:00.003Z',
        };
        return metadata.listLifecycleObject(BUCKET_NAME, params, logger, (err, data) => {
            assert.deepStrictEqual(err, null);
            assert.strictEqual(data.IsTruncated, false);
            assert.strictEqual(data.Contents.length, 1);
            const expected = [{
                key: key2,
                LastModified: '1970-01-01T00:00:00.001Z',
                staleDate: '1970-01-01T00:00:00.002Z',
                VersionId: expectedVersionIds[key2][3],
            }];
            assertContents(data.Contents, expected);

            return done();
        });
    });

    it('Should truncate list of non-current versions that start with prefix and older than beforedate', done => {
        const params = {
            listingType: 'DelimiterNonCurrent',
            prefix: 'pfx2',
            maxKeys: 2,
            beforeDate: '1970-01-01T00:00:00.005Z',
        };
        return metadata.listLifecycleObject(BUCKET_NAME, params, logger, (err, data) => {
            assert.deepStrictEqual(err, null);
            assert.strictEqual(data.IsTruncated, true);
            assert.strictEqual(data.NextKeyMarker, key2);
            assert.strictEqual(data.NextVersionIdMarker, data.Contents[1].value.VersionId);
            const expected = [
                {
                    key: key2,
                    LastModified: '1970-01-01T00:00:00.003Z',
                    staleDate: '1970-01-01T00:00:00.004Z',
                    VersionId: expectedVersionIds[key2][1],
                },
                {
                    key: key2,
                    LastModified: '1970-01-01T00:00:00.002Z',
                    staleDate: '1970-01-01T00:00:00.003Z',
                    VersionId: expectedVersionIds[key2][2],
                },
            ];
            assert.strictEqual(data.Contents.length, 2);
            assertContents(data.Contents, expected);

            return done();
        });
    });

    it('Should not take phd master key into account when listing non-current versions', done => {
        const objVal = {
            'key': 'pfx4-test-object',
            'versionId': 'null',
            'last-modified': new Date(10000).toISOString(),
        };
        const versionParams = {
            versioning: true,
        };
        const params = {
            listingType: 'DelimiterNonCurrent',
            prefix: 'pfx4',
        };
        let earlyVersionId;
        let lastVersionId;
        async.series([
            next => metadata.putObjectMD(BUCKET_NAME, 'pfx4-test-object', objVal, versionParams,
                logger, (err, res) => {
                    if (err) {
                        return next(err);
                    }
                    earlyVersionId = JSON.parse(res).versionId;
                    return next(null);
                }),
            next => metadata.putObjectMD(BUCKET_NAME, 'pfx4-test-object', objVal, versionParams,
                logger, next),
            next => metadata.putObjectMD(BUCKET_NAME, 'pfx4-test-object', objVal, versionParams,
                logger, (err, res) => {
                    if (err) {
                        return next(err);
                    }
                    lastVersionId = JSON.parse(res).versionId;
                    return next(null);
                }),
            next => metadata.deleteObjectMD(BUCKET_NAME, 'pfx4-test-object', { versionId: lastVersionId },
                logger, next),
            next => metadata.listLifecycleObject(BUCKET_NAME, params, logger, (err, data) => {
                assert.ifError(err);
                assert.strictEqual(data.Contents.length, 1);
                assert.strictEqual(data.Contents[0].value.VersionId, earlyVersionId);

                return next();
            }),
        ], done);
    });

    it('Should not list non current versions tagged for deletion', done => {
        const objVal = {
            'key': 'pfx4-test-object',
            'versionId': 'null',
            'last-modified': new Date(10000).toISOString(),
        };
        const versionParams = {
            versioning: true,
        };
        const params = {
            listingType: 'DelimiterNonCurrent',
            prefix: 'pfx4',
        };

        async.series([
            next => metadata.putObjectMD(BUCKET_NAME, 'pfx4-test-object', objVal, versionParams,
                logger, next),
            next => metadata.putObjectMD(BUCKET_NAME, 'pfx4-test-object', objVal, versionParams,
                logger, next),
|
next => flagObjectForDeletion(collection, 'pfx4-test-object', next),
|
||||||
|
next => metadata.listLifecycleObject(BUCKET_NAME, params, logger, (err, data) => {
|
||||||
|
assert.ifError(err);
|
||||||
|
assert.strictEqual(data.IsTruncated, false);
|
||||||
|
assert.strictEqual(data.Contents.length, 0);
|
||||||
|
return next();
|
||||||
|
}),
|
||||||
|
], done);
|
||||||
|
});
|
||||||
|
});
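For context, a minimal sketch of how a lifecycle caller could consume the non-current listing exercised above (bucket, prefix and dates reuse the test fixtures; the names of the continuation parameters are an assumption, they are not shown in this diff):

const params = {
    listingType: 'DelimiterNonCurrent',
    prefix: 'pfx2',
    beforeDate: '1970-01-01T00:00:00.005Z',
    maxKeys: 2,
};
metadata.listLifecycleObject(BUCKET_NAME, params, logger, (err, data) => {
    // Each entry in data.Contents is { key, value } where value carries
    // LastModified, VersionId and the computed staleDate.
    // When data.IsTruncated is true, data.NextKeyMarker and
    // data.NextVersionIdMarker would be fed back into the next call
    // (parameter names assumed) to resume the listing.
});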
|
|
@ -0,0 +1,214 @@
|
||||||
|
const async = require('async');
|
||||||
|
const assert = require('assert');
|
||||||
|
const werelogs = require('werelogs');
|
||||||
|
const { MongoMemoryReplSet } = require('mongodb-memory-server');
|
||||||
|
const logger = new werelogs.Logger('MongoClientInterface', 'debug', 'debug');
|
||||||
|
const MetadataWrapper =
|
||||||
|
require('../../../../../lib/storage/metadata/MetadataWrapper');
|
||||||
|
const { versioning } = require('../../../../../index');
|
||||||
|
const { BucketVersioningKeyFormat } = versioning.VersioningConstants;
|
||||||
|
const { makeBucketMD } = require('./utils');
|
||||||
|
|
||||||
|
const IMPL_NAME = 'mongodb';
|
||||||
|
const DB_NAME = 'metadata';
|
||||||
|
|
||||||
|
const BUCKET_NAME = 'test-lifecycle-list-bucket-null';
|
||||||
|
|
||||||
|
const mongoserver = new MongoMemoryReplSet({
|
||||||
|
debug: false,
|
||||||
|
instanceOpts: [
|
||||||
|
{ port: 27020 },
|
||||||
|
],
|
||||||
|
replSet: {
|
||||||
|
name: 'rs0',
|
||||||
|
count: 1,
|
||||||
|
DB_NAME,
|
||||||
|
storageEngine: 'ephemeralForTest',
|
||||||
|
},
|
||||||
|
});
|
||||||
|
|
||||||
|
describe('MongoClientInterface::metadata.listLifecycleObject::nullVersion', () => {
|
||||||
|
let metadata;
|
||||||
|
|
||||||
|
beforeAll(done => {
|
||||||
|
mongoserver.waitUntilRunning().then(() => {
|
||||||
|
const opts = {
|
||||||
|
mongodb: {
|
||||||
|
replicaSetHosts: 'localhost:27020',
|
||||||
|
writeConcern: 'majority',
|
||||||
|
replicaSet: 'rs0',
|
||||||
|
readPreference: 'primary',
|
||||||
|
database: DB_NAME,
|
||||||
|
},
|
||||||
|
};
|
||||||
|
metadata = new MetadataWrapper(IMPL_NAME, opts, null, logger);
|
||||||
|
metadata.client.defaultBucketKeyFormat = BucketVersioningKeyFormat.v1;
|
||||||
|
metadata.setup(done);
|
||||||
|
});
|
||||||
|
});
|
||||||
|
|
||||||
|
afterAll(done => {
|
||||||
|
async.series([
|
||||||
|
next => metadata.close(next),
|
||||||
|
next => mongoserver.stop()
|
||||||
|
.then(() => next())
|
||||||
|
.catch(next),
|
||||||
|
], done);
|
||||||
|
});
|
||||||
|
|
||||||
|
beforeEach(done => {
|
||||||
|
const bucketMD = makeBucketMD(BUCKET_NAME);
|
||||||
|
return metadata.createBucket(BUCKET_NAME, bucketMD, logger, done);
|
||||||
|
});
|
||||||
|
|
||||||
|
beforeEach(done => {
|
||||||
|
const bucketMD = makeBucketMD(BUCKET_NAME);
|
||||||
|
const versionParams = {
|
||||||
|
versioning: true,
|
||||||
|
versionId: null,
|
||||||
|
repairMaster: null,
|
||||||
|
};
|
||||||
|
async.series([
|
||||||
|
next => metadata.createBucket(BUCKET_NAME, bucketMD, logger, next),
|
||||||
|
next => {
|
||||||
|
const objName = 'key0';
|
||||||
|
const timestamp = 0;
|
||||||
|
|
||||||
|
const lastModified = new Date(timestamp).toISOString();
|
||||||
|
const objVal = {
|
||||||
|
'key': objName,
|
||||||
|
'versionId': 'null',
|
||||||
|
'isNull': true,
|
||||||
|
'last-modified': lastModified,
|
||||||
|
};
|
||||||
|
return metadata.putObjectMD(BUCKET_NAME, objName, objVal, versionParams, logger, next);
|
||||||
|
},
|
||||||
|
next => {
|
||||||
|
const objName = 'key1';
|
||||||
|
const timestamp = 0;
|
||||||
|
|
||||||
|
const lastModified = new Date(timestamp).toISOString();
|
||||||
|
const objVal = {
|
||||||
|
'key': objName,
|
||||||
|
'versionId': 'null',
|
||||||
|
'isNull': true,
|
||||||
|
'last-modified': lastModified,
|
||||||
|
};
|
||||||
|
return metadata.putObjectMD(BUCKET_NAME, objName, objVal, versionParams, logger, next);
|
||||||
|
},
|
||||||
|
next => {
|
||||||
|
const objName = 'key1';
|
||||||
|
const timestamp = 0;
|
||||||
|
|
||||||
|
const lastModified = new Date(timestamp).toISOString();
|
||||||
|
const objVal = {
|
||||||
|
'key': objName,
|
||||||
|
'last-modified': lastModified,
|
||||||
|
};
|
||||||
|
return metadata.putObjectMD(BUCKET_NAME, objName, objVal, versionParams, logger, next);
|
||||||
|
},
|
||||||
|
// key2 simulates a scenario where:
|
||||||
|
// 1) bucket is versioned
|
||||||
|
// 2) put object key2
|
||||||
|
// 3) bucket versioning gets suspended
|
||||||
|
// 4) put object key2
|
||||||
|
// result:
|
||||||
|
// {
|
||||||
|
// "_id" : "Mkey0",
|
||||||
|
// "value" : {
|
||||||
|
// "key" : "key2",
|
||||||
|
// "isNull" : true,
|
||||||
|
// "versionId" : "<VersionId2>",
|
||||||
|
// "last-modified" : "2023-07-11T14:16:00.151Z",
|
||||||
|
// }
|
||||||
|
// },
|
||||||
|
// {
|
||||||
|
// "_id" : "Vkey0\u0000<VersionId1>",
|
||||||
|
// "value" : {
|
||||||
|
// "key" : "key2",
|
||||||
|
// "versionId" : "<VersionId1>",
|
||||||
|
// "tags" : {
|
||||||
|
// },
|
||||||
|
// "last-modified" : "2023-07-11T14:15:36.713Z",
|
||||||
|
// }
|
||||||
|
// },
|
||||||
|
next => {
|
||||||
|
const objName = 'key2';
|
||||||
|
const timestamp = 0;
|
||||||
|
|
||||||
|
const lastModified = new Date(timestamp).toISOString();
|
||||||
|
const objVal = {
|
||||||
|
'key': objName,
|
||||||
|
'last-modified': lastModified,
|
||||||
|
};
|
||||||
|
return metadata.putObjectMD(BUCKET_NAME, objName, objVal, versionParams, logger, next);
|
||||||
|
},
|
||||||
|
next => {
|
||||||
|
const objName = 'key2';
|
||||||
|
const timestamp = 0;
|
||||||
|
const params = {
|
||||||
|
versionId: '',
|
||||||
|
};
|
||||||
|
|
||||||
|
const lastModified = new Date(timestamp).toISOString();
|
||||||
|
const objVal = {
|
||||||
|
'key': objName,
|
||||||
|
'last-modified': lastModified,
|
||||||
|
'isNull': true,
|
||||||
|
};
|
||||||
|
return metadata.putObjectMD(BUCKET_NAME, objName, objVal, params, logger, next);
|
||||||
|
},
|
||||||
|
], done);
|
||||||
|
});
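To make the fixture above easier to follow, here is a condensed sketch (same putObjectMD calls as above, values shortened) of the two write paths that give key2 the layout described in the scenario comment: a versioned put stores a separate version document and updates the master, while a put with an empty versionId and isNull only rewrites the master as a null version.

// Versioned put: creates a version document and updates the master.
metadata.putObjectMD(BUCKET_NAME, 'key2', { 'key': 'key2', 'last-modified': lastModified },
    { versioning: true, versionId: null, repairMaster: null }, logger, next);
// Null put (versioning suspended): versionId '' plus isNull rewrites the master only.
metadata.putObjectMD(BUCKET_NAME, 'key2', { 'key': 'key2', 'last-modified': lastModified, 'isNull': true },
    { versionId: '' }, logger, next);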
|
||||||
|
|
||||||
|
afterEach(done => metadata.deleteBucket(BUCKET_NAME, logger, done));
|
||||||
|
|
||||||
|
it('Should list the null current version and set IsNull to true', done => {
|
||||||
|
const params = {
|
||||||
|
listingType: 'DelimiterCurrent',
|
||||||
|
};
|
||||||
|
return metadata.listLifecycleObject(BUCKET_NAME, params, logger, (err, data) => {
|
||||||
|
assert.ifError(err);
|
||||||
|
assert.strictEqual(data.IsTruncated, false);
|
||||||
|
assert.strictEqual(data.Contents.length, 3);
|
||||||
|
|
||||||
|
// check that key0 has a null current version
|
||||||
|
const firstKey = data.Contents[0];
|
||||||
|
assert.strictEqual(firstKey.key, 'key0');
|
||||||
|
assert.strictEqual(firstKey.value.IsNull, true);
|
||||||
|
|
||||||
|
// check that key1 has no null current version
|
||||||
|
const secondKey = data.Contents[1];
|
||||||
|
assert.strictEqual(secondKey.key, 'key1');
|
||||||
|
assert(!secondKey.value.IsNull);
|
||||||
|
|
||||||
|
// check that key2 has a null current version
|
||||||
|
const thirdKey = data.Contents[2];
|
||||||
|
assert.strictEqual(thirdKey.key, 'key2');
|
||||||
|
assert.strictEqual(thirdKey.value.IsNull, true);
|
||||||
|
return done();
|
||||||
|
});
|
||||||
|
});
|
||||||
|
|
||||||
|
it('Should list the null non-current version and set IsNull to true', done => {
|
||||||
|
const params = {
|
||||||
|
listingType: 'DelimiterNonCurrent',
|
||||||
|
};
|
||||||
|
return metadata.listLifecycleObject(BUCKET_NAME, params, logger, (err, data) => {
|
||||||
|
assert.deepStrictEqual(err, null);
|
||||||
|
assert.strictEqual(data.IsTruncated, false);
|
||||||
|
assert.strictEqual(data.Contents.length, 2);
|
||||||
|
|
||||||
|
// check that key1 has a null non-current version
|
||||||
|
const firstKey = data.Contents[0];
|
||||||
|
assert.strictEqual(firstKey.key, 'key1');
|
||||||
|
assert.strictEqual(firstKey.value.IsNull, true);
|
||||||
|
|
||||||
|
// check that key2 has no null non-current version
|
||||||
|
const secondKey = data.Contents[1];
|
||||||
|
assert.strictEqual(secondKey.key, 'key2');
|
||||||
|
assert(!secondKey.value.IsNull);
|
||||||
|
return done();
|
||||||
|
});
|
||||||
|
});
|
||||||
|
});
|
|
@ -0,0 +1,446 @@
|
||||||
|
const async = require('async');
|
||||||
|
const assert = require('assert');
|
||||||
|
const werelogs = require('werelogs');
|
||||||
|
const { MongoMemoryReplSet } = require('mongodb-memory-server');
|
||||||
|
const logger = new werelogs.Logger('MongoClientInterface', 'debug', 'debug');
|
||||||
|
const MetadataWrapper =
|
||||||
|
require('../../../../../lib/storage/metadata/MetadataWrapper');
|
||||||
|
const { versioning } = require('../../../../../index');
|
||||||
|
const { BucketVersioningKeyFormat } = versioning.VersioningConstants;
|
||||||
|
const { makeBucketMD, putBulkObjectVersions } = require('./utils');
|
||||||
|
|
||||||
|
const IMPL_NAME = 'mongodb';
|
||||||
|
const DB_NAME = 'metadata';
|
||||||
|
const BUCKET_NAME = 'test-lifecycle-list-orphan-bucket';
|
||||||
|
|
||||||
|
const mongoserver = new MongoMemoryReplSet({
|
||||||
|
debug: false,
|
||||||
|
instanceOpts: [
|
||||||
|
{ port: 27020 },
|
||||||
|
],
|
||||||
|
replSet: {
|
||||||
|
name: 'rs0',
|
||||||
|
count: 1,
|
||||||
|
DB_NAME,
|
||||||
|
storageEngine: 'ephemeralForTest',
|
||||||
|
},
|
||||||
|
});
|
||||||
|
|
||||||
|
describe('MongoClientInterface::metadata.listLifecycleObject::orphan', () => {
|
||||||
|
let metadata;
|
||||||
|
|
||||||
|
beforeAll(done => {
|
||||||
|
mongoserver.waitUntilRunning().then(() => {
|
||||||
|
const opts = {
|
||||||
|
mongodb: {
|
||||||
|
replicaSetHosts: 'localhost:27020',
|
||||||
|
writeConcern: 'majority',
|
||||||
|
replicaSet: 'rs0',
|
||||||
|
readPreference: 'primary',
|
||||||
|
database: DB_NAME,
|
||||||
|
},
|
||||||
|
};
|
||||||
|
metadata = new MetadataWrapper(IMPL_NAME, opts, null, logger);
|
||||||
|
metadata.client.defaultBucketKeyFormat = BucketVersioningKeyFormat.v1;
|
||||||
|
metadata.setup(done);
|
||||||
|
});
|
||||||
|
});
|
||||||
|
|
||||||
|
afterAll(done => {
|
||||||
|
async.series([
|
||||||
|
next => metadata.close(next),
|
||||||
|
next => mongoserver.stop()
|
||||||
|
.then(() => next())
|
||||||
|
.catch(next),
|
||||||
|
], done);
|
||||||
|
});
|
||||||
|
|
||||||
|
beforeEach(done => {
|
||||||
|
const bucketMD = makeBucketMD(BUCKET_NAME);
|
||||||
|
const versionParams = {
|
||||||
|
versioning: true,
|
||||||
|
versionId: null,
|
||||||
|
repairMaster: null,
|
||||||
|
};
|
||||||
|
async.series([
|
||||||
|
next => metadata.createBucket(BUCKET_NAME, bucketMD, logger, next),
|
||||||
|
next => {
|
||||||
|
const keyName = 'pfx0-test-object';
|
||||||
|
|
||||||
|
const objVal = {
|
||||||
|
'key': keyName,
|
||||||
|
'isDeleteMarker': true,
|
||||||
|
'last-modified': new Date(0).toISOString(), // 1970-01-01T00:00:00.000Z
|
||||||
|
};
|
||||||
|
const params = {
|
||||||
|
versioning: true,
|
||||||
|
};
|
||||||
|
return metadata.putObjectMD(BUCKET_NAME, keyName, objVal, params, logger, next);
|
||||||
|
},
|
||||||
|
next => {
|
||||||
|
const params = {
|
||||||
|
objName: 'pfx1-test-object',
|
||||||
|
objVal: {
|
||||||
|
key: 'pfx1-test-object',
|
||||||
|
versionId: 'null',
|
||||||
|
},
|
||||||
|
nbVersions: 1,
|
||||||
|
};
|
||||||
|
const timestamp = 0;
|
||||||
|
putBulkObjectVersions(metadata, BUCKET_NAME, params.objName, params.objVal, versionParams,
|
||||||
|
params.nbVersions, timestamp, logger, next);
|
||||||
|
},
|
||||||
|
next => {
|
||||||
|
const params = {
|
||||||
|
objName: 'pfx2-test-object',
|
||||||
|
objVal: {
|
||||||
|
key: 'pfx2-test-object',
|
||||||
|
versionId: 'null',
|
||||||
|
},
|
||||||
|
nbVersions: 1,
|
||||||
|
};
|
||||||
|
const timestamp = 0;
|
||||||
|
putBulkObjectVersions(metadata, BUCKET_NAME, params.objName, params.objVal, versionParams,
|
||||||
|
params.nbVersions, timestamp, logger, next);
|
||||||
|
},
|
||||||
|
next => {
|
||||||
|
const keyName = 'pfx2-test-object';
|
||||||
|
|
||||||
|
const objVal = {
|
||||||
|
'key': keyName,
|
||||||
|
'isDeleteMarker': true,
|
||||||
|
'last-modified': new Date(2).toISOString(), // 1970-01-01T00:00:00.002Z
|
||||||
|
};
|
||||||
|
const params = {
|
||||||
|
versioning: true,
|
||||||
|
};
|
||||||
|
return metadata.putObjectMD(BUCKET_NAME, keyName, objVal, params, logger, next);
|
||||||
|
},
|
||||||
|
next => {
|
||||||
|
const keyName = 'pfx3-test-object';
|
||||||
|
|
||||||
|
const objVal = {
|
||||||
|
'key': keyName,
|
||||||
|
'isDeleteMarker': true,
|
||||||
|
'last-modified': new Date(0).toISOString(), // 1970-01-01T00:00:00.000Z
|
||||||
|
};
|
||||||
|
const params = {
|
||||||
|
versioning: true,
|
||||||
|
};
|
||||||
|
return metadata.putObjectMD(BUCKET_NAME, keyName, objVal, params, logger, next);
|
||||||
|
},
|
||||||
|
next => {
|
||||||
|
const keyName = 'pfx4-test-object';
|
||||||
|
|
||||||
|
const objVal = {
|
||||||
|
'key': keyName,
|
||||||
|
'isDeleteMarker': true,
|
||||||
|
'last-modified': new Date(5).toISOString(), // 1970-01-01T00:00:00.005Z
|
||||||
|
};
|
||||||
|
const params = {
|
||||||
|
versioning: true,
|
||||||
|
};
|
||||||
|
return metadata.putObjectMD(BUCKET_NAME, keyName, objVal, params, logger, next);
|
||||||
|
},
|
||||||
|
next => {
|
||||||
|
const keyName = 'pfx4-test-object2';
|
||||||
|
|
||||||
|
const objVal = {
|
||||||
|
'key': keyName,
|
||||||
|
'isDeleteMarker': true,
|
||||||
|
'last-modified': new Date(6).toISOString(), // 1970-01-01T00:00:00.006Z
|
||||||
|
};
|
||||||
|
const params = {
|
||||||
|
versioning: true,
|
||||||
|
};
|
||||||
|
return metadata.putObjectMD(BUCKET_NAME, keyName, objVal, params, logger, next);
|
||||||
|
},
|
||||||
|
], done);
|
||||||
|
});
|
||||||
|
/* eslint-disable max-len */
|
||||||
|
// { "_id" : "Mpfx1-test-object", "value" : { "key" : "pfx1-test-object", "versionId" : "v1", "last-modified" : "1970-01-01T00:00:00.001Z" } }
|
||||||
|
// { "_id" : "Vpfx0-test-object{sep}v0", "value" : { "key" : "pfx0-test-object", "isDeleteMarker" : true, "last-modified" : "1970-01-01T00:00:00.000Z", "versionId" : "v0" } }
|
||||||
|
// { "_id" : "Vpfx1-test-object{sep}v1", "value" : { "key" : "pfx1-test-object", "versionId" : "v1", "last-modified" : "1970-01-01T00:00:00.001Z" } }
|
||||||
|
// { "_id" : "Vpfx2-test-object{sep}v3", "value" : { "key" : "pfx2-test-object", "isDeleteMarker" : true, "last-modified" : "1970-01-01T00:00:00.002Z", "versionId" : "v3" } }
|
||||||
|
// { "_id" : "Vpfx2-test-object{sep}v2", "value" : { "key" : "pfx2-test-object", "versionId" : "v2", "last-modified" : "1970-01-01T00:00:00.001Z" } }
|
||||||
|
// { "_id" : "Vpfx3-test-object{sep}v4", "value" : { "key" : "pfx3-test-object", "isDeleteMarker" : true, "last-modified" : "1970-01-01T00:00:00.000Z", "versionId" : "v4" } }
|
||||||
|
// { "_id" : "Vpfx4-test-object{sep}v5", "value" : { "key" : "pfx4-test-object", "isDeleteMarker" : true, "last-modified" : "1970-01-01T00:00:00.005Z", "versionId" : "v5" } }
|
||||||
|
// { "_id" : "Vpfx4-test-object2{sep}v6", "value" : { "key" : "pfx4-test-object", "isDeleteMarker" : true, "last-modified" : "1970-01-01T00:00:00.006Z", "versionId" : "v6" } }
|
||||||
|
/* eslint-enable max-len */
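Reading the dump above against the tests that follow: pfx0-test-object, pfx3-test-object, pfx4-test-object and pfx4-test-object2 each consist of a single delete marker, so they are the orphan delete markers; pfx1-test-object only has a regular version and pfx2-test-object's delete marker sits on top of a regular version, so neither is reported.

// Orphans (delete marker is the only entry for the key):
//   pfx0-test-object, pfx3-test-object, pfx4-test-object, pfx4-test-object2
// Not orphans: pfx1-test-object (regular version only),
//              pfx2-test-object (delete marker v3 over regular version v2)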
|
||||||
|
|
||||||
|
afterEach(done => {
|
||||||
|
metadata.deleteBucket(BUCKET_NAME, logger, done);
|
||||||
|
});
|
||||||
|
|
||||||
|
it('Should list orphan delete markers', done => {
|
||||||
|
const params = {
|
||||||
|
listingType: 'DelimiterOrphanDeleteMarker',
|
||||||
|
};
|
||||||
|
return metadata.listLifecycleObject(BUCKET_NAME, params, logger, (err, data) => {
|
||||||
|
assert.deepStrictEqual(err, null);
|
||||||
|
assert.strictEqual(data.IsTruncated, false);
|
||||||
|
assert(!data.NextMarker);
|
||||||
|
assert.strictEqual(data.Contents.length, 4);
|
||||||
|
assert.strictEqual(data.Contents[0].key, 'pfx0-test-object');
|
||||||
|
assert.strictEqual(data.Contents[1].key, 'pfx3-test-object');
|
||||||
|
assert.strictEqual(data.Contents[2].key, 'pfx4-test-object');
|
||||||
|
assert.strictEqual(data.Contents[3].key, 'pfx4-test-object2');
|
||||||
|
return done();
|
||||||
|
});
|
||||||
|
});
|
||||||
|
|
||||||
|
it('Should return empty list when beforeDate is before youngest last-modified', done => {
|
||||||
|
const params = {
|
||||||
|
listingType: 'DelimiterOrphanDeleteMarker',
|
||||||
|
beforeDate: '1970-01-01T00:00:00.000Z',
|
||||||
|
};
|
||||||
|
return metadata.listLifecycleObject(BUCKET_NAME, params, logger, (err, data) => {
|
||||||
|
assert.deepStrictEqual(err, null);
|
||||||
|
assert.strictEqual(data.IsTruncated, false);
|
||||||
|
assert(!data.NextMarker);
|
||||||
|
assert.strictEqual(data.Contents.length, 0);
|
||||||
|
|
||||||
|
return done();
|
||||||
|
});
|
||||||
|
});
|
||||||
|
|
||||||
|
it('Should list orphan delete markers older than 1970-01-01T00:00:00.003Z', done => {
|
||||||
|
const params = {
|
||||||
|
listingType: 'DelimiterOrphanDeleteMarker',
|
||||||
|
beforeDate: '1970-01-01T00:00:00.003Z',
|
||||||
|
};
|
||||||
|
return metadata.listLifecycleObject(BUCKET_NAME, params, logger, (err, data) => {
|
||||||
|
assert.deepStrictEqual(err, null);
|
||||||
|
assert.strictEqual(data.IsTruncated, false);
|
||||||
|
assert(!data.NextMarker);
|
||||||
|
assert.strictEqual(data.Contents.length, 2);
|
||||||
|
assert.strictEqual(data.Contents[0].key, 'pfx0-test-object');
|
||||||
|
assert.strictEqual(data.Contents[1].key, 'pfx3-test-object');
|
||||||
|
|
||||||
|
return done();
|
||||||
|
});
|
||||||
|
});
|
||||||
|
|
||||||
|
it('Should return the first part of the orphan delete markers listing', done => {
|
||||||
|
const params = {
|
||||||
|
listingType: 'DelimiterOrphanDeleteMarker',
|
||||||
|
maxKeys: 1,
|
||||||
|
};
|
||||||
|
return metadata.listLifecycleObject(BUCKET_NAME, params, logger, (err, data) => {
|
||||||
|
assert.deepStrictEqual(err, null);
|
||||||
|
assert.strictEqual(data.IsTruncated, true);
|
||||||
|
assert.strictEqual(data.NextMarker, 'pfx0-test-object');
|
||||||
|
assert.strictEqual(data.Contents.length, 1);
|
||||||
|
assert.strictEqual(data.Contents[0].key, 'pfx0-test-object');
|
||||||
|
|
||||||
|
return done();
|
||||||
|
});
|
||||||
|
});
|
||||||
|
|
||||||
|
it('Should return the second part of the orphan delete markers listing', done => {
|
||||||
|
const params = {
|
||||||
|
listingType: 'DelimiterOrphanDeleteMarker',
|
||||||
|
marker: 'pfx0-test-object',
|
||||||
|
maxKeys: 1,
|
||||||
|
};
|
||||||
|
return metadata.listLifecycleObject(BUCKET_NAME, params, logger, (err, data) => {
|
||||||
|
assert.deepStrictEqual(err, null);
|
||||||
|
assert.strictEqual(data.IsTruncated, true);
|
||||||
|
assert.strictEqual(data.NextMarker, 'pfx3-test-object');
|
||||||
|
assert.strictEqual(data.Contents.length, 1);
|
||||||
|
assert.strictEqual(data.Contents[0].key, 'pfx3-test-object');
|
||||||
|
|
||||||
|
return done();
|
||||||
|
});
|
||||||
|
});
|
||||||
|
|
||||||
|
it('Should return the third part of the orphan delete markers listing', done => {
|
||||||
|
const params = {
|
||||||
|
listingType: 'DelimiterOrphanDeleteMarker',
|
||||||
|
marker: 'pfx3-test-object',
|
||||||
|
maxKeys: 1,
|
||||||
|
};
|
||||||
|
return metadata.listLifecycleObject(BUCKET_NAME, params, logger, (err, data) => {
|
||||||
|
assert.deepStrictEqual(err, null);
|
||||||
|
assert.strictEqual(data.IsTruncated, true);
|
||||||
|
assert.strictEqual(data.NextMarker, 'pfx4-test-object');
|
||||||
|
assert.strictEqual(data.Contents.length, 1);
|
||||||
|
assert.strictEqual(data.Contents[0].key, 'pfx4-test-object');
|
||||||
|
|
||||||
|
return done();
|
||||||
|
});
|
||||||
|
});
|
||||||
|
|
||||||
|
it('Should return the fourth part of the orphan delete markers listing', done => {
|
||||||
|
const params = {
|
||||||
|
listingType: 'DelimiterOrphanDeleteMarker',
|
||||||
|
marker: 'pfx4-test-object',
|
||||||
|
maxKeys: 1,
|
||||||
|
};
|
||||||
|
return metadata.listLifecycleObject(BUCKET_NAME, params, logger, (err, data) => {
|
||||||
|
assert.deepStrictEqual(err, null);
|
||||||
|
assert.strictEqual(data.IsTruncated, false);
|
||||||
|
assert(!data.NextMarker);
|
||||||
|
assert.strictEqual(data.Contents.length, 1);
|
||||||
|
assert.strictEqual(data.Contents[0].key, 'pfx4-test-object2');
|
||||||
|
|
||||||
|
return done();
|
||||||
|
});
|
||||||
|
});
|
||||||
|
|
||||||
|
it('Should list the two first orphan delete markers', done => {
|
||||||
|
const params = {
|
||||||
|
listingType: 'DelimiterOrphanDeleteMarker',
|
||||||
|
maxKeys: 2,
|
||||||
|
};
|
||||||
|
return metadata.listLifecycleObject(BUCKET_NAME, params, logger, (err, data) => {
|
||||||
|
assert.deepStrictEqual(err, null);
|
||||||
|
assert.strictEqual(data.IsTruncated, true);
|
||||||
|
assert.strictEqual(data.Contents.length, 2);
|
||||||
|
assert.strictEqual(data.NextMarker, 'pfx3-test-object');
|
||||||
|
assert.strictEqual(data.Contents[0].key, 'pfx0-test-object');
|
||||||
|
assert.strictEqual(data.Contents[1].key, 'pfx3-test-object');
|
||||||
|
|
||||||
|
return done();
|
||||||
|
});
|
||||||
|
});
|
||||||
|
|
||||||
|
it('Should list the four first orphan delete markers', done => {
|
||||||
|
const params = {
|
||||||
|
listingType: 'DelimiterOrphanDeleteMarker',
|
||||||
|
maxKeys: 4,
|
||||||
|
};
|
||||||
|
return metadata.listLifecycleObject(BUCKET_NAME, params, logger, (err, data) => {
|
||||||
|
assert.deepStrictEqual(err, null);
|
||||||
|
assert.strictEqual(data.IsTruncated, false);
|
||||||
|
assert(!data.NextMarker);
|
||||||
|
assert.strictEqual(data.Contents.length, 4);
|
||||||
|
assert.strictEqual(data.Contents[0].key, 'pfx0-test-object');
|
||||||
|
assert.strictEqual(data.Contents[1].key, 'pfx3-test-object');
|
||||||
|
assert.strictEqual(data.Contents[2].key, 'pfx4-test-object');
|
||||||
|
assert.strictEqual(data.Contents[3].key, 'pfx4-test-object2');
|
||||||
|
|
||||||
|
return done();
|
||||||
|
});
|
||||||
|
});
|
||||||
|
|
||||||
|
it('Should return an empty list if no orphan delete marker starts with prefix pfx2', done => {
|
||||||
|
const params = {
|
||||||
|
listingType: 'DelimiterOrphanDeleteMarker',
|
||||||
|
prefix: 'pfx2',
|
||||||
|
};
|
||||||
|
return metadata.listLifecycleObject(BUCKET_NAME, params, logger, (err, data) => {
|
||||||
|
assert.deepStrictEqual(err, null);
|
||||||
|
assert.strictEqual(data.IsTruncated, false);
|
||||||
|
assert(!data.NextMarker);
|
||||||
|
assert.strictEqual(data.Contents.length, 0);
|
||||||
|
|
||||||
|
return done();
|
||||||
|
});
|
||||||
|
});
|
||||||
|
|
||||||
|
it('Should list orphan delete markers that start with prefix pfx4', done => {
|
||||||
|
const params = {
|
||||||
|
listingType: 'DelimiterOrphanDeleteMarker',
|
||||||
|
prefix: 'pfx4',
|
||||||
|
};
|
||||||
|
return metadata.listLifecycleObject(BUCKET_NAME, params, logger, (err, data) => {
|
||||||
|
assert.deepStrictEqual(err, null);
|
||||||
|
assert.strictEqual(data.IsTruncated, false);
|
||||||
|
assert(!data.NextMarker);
|
||||||
|
assert.strictEqual(data.Contents.length, 2);
|
||||||
|
assert.strictEqual(data.Contents[0].key, 'pfx4-test-object');
|
||||||
|
assert.strictEqual(data.Contents[1].key, 'pfx4-test-object2');
|
||||||
|
|
||||||
|
return done();
|
||||||
|
});
|
||||||
|
});
|
||||||
|
|
||||||
|
it('Should return the first orphan delete marker version that starts with prefix', done => {
|
||||||
|
const params = {
|
||||||
|
listingType: 'DelimiterOrphanDeleteMarker',
|
||||||
|
prefix: 'pfx4',
|
||||||
|
maxKeys: 1,
|
||||||
|
};
|
||||||
|
return metadata.listLifecycleObject(BUCKET_NAME, params, logger, (err, data) => {
|
||||||
|
assert.deepStrictEqual(err, null);
|
||||||
|
assert.strictEqual(data.IsTruncated, true);
|
||||||
|
assert.strictEqual(data.Contents.length, 1);
|
||||||
|
assert.strictEqual(data.NextMarker, 'pfx4-test-object');
|
||||||
|
assert.strictEqual(data.Contents[0].key, 'pfx4-test-object');
|
||||||
|
|
||||||
|
return done();
|
||||||
|
});
|
||||||
|
});
|
||||||
|
|
||||||
|
it('Should return the following orphan delete marker version that starts with prefix', done => {
|
||||||
|
const params = {
|
||||||
|
listingType: 'DelimiterOrphanDeleteMarker',
|
||||||
|
marker: 'pfx4-test-object',
|
||||||
|
prefix: 'pfx4',
|
||||||
|
maxKeys: 1,
|
||||||
|
};
|
||||||
|
return metadata.listLifecycleObject(BUCKET_NAME, params, logger, (err, data) => {
|
||||||
|
assert.deepStrictEqual(err, null);
|
||||||
|
assert.strictEqual(data.IsTruncated, false);
|
||||||
|
assert(!data.NextMarker);
|
||||||
|
assert.strictEqual(data.Contents.length, 1);
|
||||||
|
assert.strictEqual(data.Contents[0].key, 'pfx4-test-object2');
|
||||||
|
|
||||||
|
return done();
|
||||||
|
});
|
||||||
|
});
|
||||||
|
|
||||||
|
it('Should return the truncated list of orphan delete markers older than 1970-01-01T00:00:00.006Z', done => {
|
||||||
|
const params = {
|
||||||
|
listingType: 'DelimiterOrphanDeleteMarker',
|
||||||
|
maxKeys: 2,
|
||||||
|
beforeDate: '1970-01-01T00:00:00.006Z',
|
||||||
|
};
|
||||||
|
return metadata.listLifecycleObject(BUCKET_NAME, params, logger, (err, data) => {
|
||||||
|
assert.deepStrictEqual(err, null);
|
||||||
|
assert.strictEqual(data.IsTruncated, true);
|
||||||
|
assert.strictEqual(data.Contents.length, 2);
|
||||||
|
assert.strictEqual(data.Contents[0].key, 'pfx0-test-object');
|
||||||
|
assert.strictEqual(data.Contents[1].key, 'pfx3-test-object');
|
||||||
|
assert.strictEqual(data.NextMarker, 'pfx3-test-object');
|
||||||
|
|
||||||
|
return done();
|
||||||
|
});
|
||||||
|
});
|
||||||
|
|
||||||
|
it('Should return the following list of orphan delete markers older than 1970-01-01T00:00:00.006Z', done => {
|
||||||
|
const params = {
|
||||||
|
listingType: 'DelimiterOrphanDeleteMarker',
|
||||||
|
maxKeys: 2,
|
||||||
|
beforeDate: '1970-01-01T00:00:00.006Z',
|
||||||
|
marker: 'pfx3-test-object',
|
||||||
|
};
|
||||||
|
return metadata.listLifecycleObject(BUCKET_NAME, params, logger, (err, data) => {
|
||||||
|
assert.deepStrictEqual(err, null);
|
||||||
|
assert.strictEqual(data.IsTruncated, false);
|
||||||
|
assert.strictEqual(data.Contents.length, 1);
|
||||||
|
assert.strictEqual(data.Contents[0].key, 'pfx4-test-object');
|
||||||
|
|
||||||
|
return done();
|
||||||
|
});
|
||||||
|
});
|
||||||
|
|
||||||
|
it('Should return the truncated list of orphan delete markers older than 1970-01-01T00:00:00.001Z', done => {
|
||||||
|
const params = {
|
||||||
|
listingType: 'DelimiterOrphanDeleteMarker',
|
||||||
|
maxKeys: 2,
|
||||||
|
beforeDate: '1970-01-01T00:00:00.001Z',
|
||||||
|
};
|
||||||
|
return metadata.listLifecycleObject(BUCKET_NAME, params, logger, (err, data) => {
|
||||||
|
assert.deepStrictEqual(err, null);
|
||||||
|
assert.strictEqual(data.IsTruncated, true);
|
||||||
|
assert.strictEqual(data.Contents.length, 2);
|
||||||
|
assert.strictEqual(data.Contents[0].key, 'pfx0-test-object');
|
||||||
|
assert.strictEqual(data.Contents[1].key, 'pfx3-test-object');
|
||||||
|
assert.strictEqual(data.NextMarker, 'pfx3-test-object');
|
||||||
|
|
||||||
|
return done();
|
||||||
|
});
|
||||||
|
});
|
||||||
|
});
|
|
@ -17,15 +17,24 @@ const assert = require('assert');
 */
function putBulkObjectVersions(metadata, bucketName, objName, objVal, params, versionNb, timestamp, logger, cb) {
    let count = 0;
    const versionIds = [];
    return async.whilst(
        () => count < versionNb,
        cbIterator => {
            count++;
            const lastModified = new Date(timestamp + count).toISOString();
            const finalObjectVal = Object.assign(objVal, { 'last-modified': lastModified });
            return metadata.putObjectMD(bucketName, objName, finalObjectVal, params, logger, (err, data) => {
                versionIds.push(JSON.parse(data).versionId);
                return cbIterator(err, versionIds);
            });
        }, (err, expectedVersionIds) => {
            // The last version is removed since it represents the current version.
            expectedVersionIds.pop();
            // The array is reversed to be aligned with the version order (latest to oldest).
            expectedVersionIds.reverse();
            return cb(err, expectedVersionIds);
        });
}
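A minimal usage sketch of the updated helper (bucket and key names just reuse the conventions of these tests): putBulkObjectVersions now reports the version IDs it created, with the current version removed and the rest ordered latest to oldest, ready to be compared against the VersionId of each listed non-current entry.

const versionParams = { versioning: true, versionId: null, repairMaster: null };
putBulkObjectVersions(metadata, BUCKET_NAME, 'pfx2-test-object', { key: 'pfx2-test-object' },
    versionParams, 5, 0, logger, (err, expectedVersionIds) => {
        assert.ifError(err);
        // 5 versions were written, so 4 of them are non-current.
        assert.strictEqual(expectedVersionIds.length, 4);
    });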
|
||||||
|
|
||||||
function makeBucketMD(bucketName) {
|
@ -63,6 +72,9 @@ function assertContents(contents, expected) {
        assert.strictEqual(c.key, expected[i].key);
        assert.strictEqual(c.value.LastModified, expected[i].LastModified);
        assert.strictEqual(c.value.staleDate, expected[i].staleDate);
        if (expected[i].VersionId) {
            assert.strictEqual(c.value.VersionId, expected[i].VersionId);
        }
    });
}
|
||||||
|
|
||||||
|
|
|
@ -13,6 +13,8 @@ const VSConst =
    require('../../../../lib/versioning/constants').VersioningConstants;
const { DbPrefixes } = VSConst;

const DELIMITER_TIMEOUT_MS = 10 * 1000; // 10s

const VID_SEP = VSConst.VersionId.Separator;
const EmptyResult = {
    Contents: [],
@ -60,7 +62,9 @@ describe('DelimiterCurrent', () => {
        const delimiter = new DelimiterCurrent({ prefix: 'prefix' }, fakeLogger, 'v1');

        const listingKey = makeV1Key('noprefix');
        const creationDate = '1970-01-01T00:00:00.001Z';
        const value = `{"last-modified": "${creationDate}"}`;
        assert.strictEqual(delimiter.filter({ key: listingKey, value }), FILTER_SKIP);

        assert.deepStrictEqual(delimiter.result(), EmptyResult);
    });
@ -125,4 +129,132 @@ describe('DelimiterCurrent', () => {

        assert.deepStrictEqual(delimiter.result(), expectedResult);
    });
|
||||||
|
|
||||||
|
it('should return the object created before beforeDate', () => {
|
||||||
|
const beforeDate = '1970-01-01T00:00:00.003Z';
|
||||||
|
const delimiter = new DelimiterCurrent({ beforeDate }, fakeLogger, 'v1');
|
||||||
|
|
||||||
|
const masterKey1 = 'key1';
|
||||||
|
const date1 = '1970-01-01T00:00:00.004Z';
|
||||||
|
const value1 = `{"last-modified": "${date1}"}`;
|
||||||
|
|
||||||
|
assert.strictEqual(delimiter.filter({
|
||||||
|
key: makeV1Key(masterKey1),
|
||||||
|
value: value1,
|
||||||
|
}), FILTER_ACCEPT);
|
||||||
|
|
||||||
|
const masterKey2 = 'key2';
|
||||||
|
const date2 = '1970-01-01T00:00:00.000Z';
|
||||||
|
const value2 = `{"last-modified": "${date2}"}`;
|
||||||
|
|
||||||
|
assert.strictEqual(delimiter.filter({
|
||||||
|
key: makeV1Key(masterKey2),
|
||||||
|
value: value2,
|
||||||
|
}), FILTER_ACCEPT);
|
||||||
|
|
||||||
|
const expectedResult = {
|
||||||
|
Contents: [
|
||||||
|
{
|
||||||
|
key: masterKey2,
|
||||||
|
value: value2,
|
||||||
|
},
|
||||||
|
],
|
||||||
|
IsTruncated: false,
|
||||||
|
};
|
||||||
|
|
||||||
|
assert.deepStrictEqual(delimiter.result(), expectedResult);
|
||||||
|
});
|
||||||
|
|
||||||
|
it('should return the objects pushed before timeout', () => {
|
||||||
|
const beforeDate = '1970-01-01T00:00:00.003Z';
|
||||||
|
const delimiter = new DelimiterCurrent({ beforeDate }, fakeLogger, 'v1');
|
||||||
|
|
||||||
|
const masterKey1 = 'key1';
|
||||||
|
const date1 = '1970-01-01T00:00:00.000Z';
|
||||||
|
const value1 = `{"last-modified": "${date1}"}`;
|
||||||
|
|
||||||
|
assert.strictEqual(delimiter.filter({
|
||||||
|
key: makeV1Key(masterKey1),
|
||||||
|
value: value1,
|
||||||
|
}), FILTER_ACCEPT);
|
||||||
|
|
||||||
|
const masterKey2 = 'key2';
|
||||||
|
const date2 = '1970-01-01T00:00:00.001Z';
|
||||||
|
const value2 = `{"last-modified": "${date2}"}`;
|
||||||
|
|
||||||
|
assert.strictEqual(delimiter.filter({
|
||||||
|
key: makeV1Key(masterKey2),
|
||||||
|
value: value2,
|
||||||
|
}), FILTER_ACCEPT);
|
||||||
|
|
||||||
|
delimiter.start = Date.now() - (DELIMITER_TIMEOUT_MS + 1);
|
||||||
|
|
||||||
|
const masterKey3 = 'key3';
|
||||||
|
const date3 = '1970-01-01T00:00:00.002Z';
|
||||||
|
const value3 = `{"last-modified": "${date3}"}`;
|
||||||
|
|
||||||
|
assert.strictEqual(delimiter.filter({
|
||||||
|
key: makeV1Key(masterKey3),
|
||||||
|
value: value3,
|
||||||
|
}), FILTER_END);
|
||||||
|
|
||||||
|
const expectedResult = {
|
||||||
|
Contents: [
|
||||||
|
{
|
||||||
|
key: masterKey1,
|
||||||
|
value: value1,
|
||||||
|
},
|
||||||
|
{
|
||||||
|
key: masterKey2,
|
||||||
|
value: value2,
|
||||||
|
},
|
||||||
|
],
|
||||||
|
NextMarker: masterKey2,
|
||||||
|
IsTruncated: true,
|
||||||
|
};
|
||||||
|
|
||||||
|
assert.deepStrictEqual(delimiter.result(), expectedResult);
|
||||||
|
});
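The timeout behaviour is driven entirely from the test side: the trick used here and in the next test is to back-date the delimiter's start timestamp so that the next filter() call is considered past DELIMITER_TIMEOUT_MS, which ends the listing (FILTER_END) and marks the result as truncated.

// Force the next filter() call to hit the internal listing timeout.
delimiter.start = Date.now() - (DELIMITER_TIMEOUT_MS + 1);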
|
||||||
|
|
||||||
|
it('should return empty content after timeout', () => {
|
||||||
|
const beforeDate = '1970-01-01T00:00:00.003Z';
|
||||||
|
const delimiter = new DelimiterCurrent({ beforeDate }, fakeLogger, 'v1');
|
||||||
|
|
||||||
|
const masterKey1 = 'key1';
|
||||||
|
const date1 = '1970-01-01T00:00:00.004Z';
|
||||||
|
const value1 = `{"last-modified": "${date1}"}`;
|
||||||
|
|
||||||
|
assert.strictEqual(delimiter.filter({
|
||||||
|
key: makeV1Key(masterKey1),
|
||||||
|
value: value1,
|
||||||
|
}), FILTER_ACCEPT);
|
||||||
|
|
||||||
|
const masterKey2 = 'key2';
|
||||||
|
const date2 = '1970-01-01T00:00:00.005Z';
|
||||||
|
const value2 = `{"last-modified": "${date2}"}`;
|
||||||
|
|
||||||
|
assert.strictEqual(delimiter.filter({
|
||||||
|
key: makeV1Key(masterKey2),
|
||||||
|
value: value2,
|
||||||
|
}), FILTER_ACCEPT);
|
||||||
|
|
||||||
|
delimiter.start = Date.now() - (DELIMITER_TIMEOUT_MS + 1);
|
||||||
|
|
||||||
|
const masterKey3 = 'key3';
|
||||||
|
const date3 = '1970-01-01T00:00:00.006Z';
|
||||||
|
const value3 = `{"last-modified": "${date3}"}`;
|
||||||
|
|
||||||
|
assert.strictEqual(delimiter.filter({
|
||||||
|
key: makeV1Key(masterKey3),
|
||||||
|
value: value3,
|
||||||
|
}), FILTER_END);
|
||||||
|
|
||||||
|
const expectedResult = {
|
||||||
|
Contents: [],
|
||||||
|
NextMarker: masterKey2,
|
||||||
|
IsTruncated: true,
|
||||||
|
};
|
||||||
|
|
||||||
|
assert.deepStrictEqual(delimiter.result(), expectedResult);
|
||||||
|
});
|
||||||
});
|
});
|
||||||
|
|
|
@ -0,0 +1,372 @@
|
||||||
|
'use strict'; // eslint-disable-line strict
|
||||||
|
|
||||||
|
const assert = require('assert');
|
||||||
|
|
||||||
|
const DelimiterNonCurrent =
|
||||||
|
require('../../../../lib/algos/list/delimiterNonCurrent').DelimiterNonCurrent;
|
||||||
|
const {
|
||||||
|
FILTER_ACCEPT,
|
||||||
|
FILTER_SKIP,
|
||||||
|
FILTER_END,
|
||||||
|
} = require('../../../../lib/algos/list/tools');
|
||||||
|
const VSConst =
|
||||||
|
require('../../../../lib/versioning/constants').VersioningConstants;
|
||||||
|
const { DbPrefixes } = VSConst;
|
||||||
|
|
||||||
|
// TODO: find an acceptable timeout value.
|
||||||
|
const DELIMITER_TIMEOUT_MS = 10 * 1000; // 10s
|
||||||
|
|
||||||
|
const VID_SEP = VSConst.VersionId.Separator;
|
||||||
|
const EmptyResult = {
|
||||||
|
Contents: [],
|
||||||
|
IsTruncated: false,
|
||||||
|
};
|
||||||
|
|
||||||
|
const fakeLogger = {
|
||||||
|
trace: () => {},
|
||||||
|
debug: () => {},
|
||||||
|
info: () => {},
|
||||||
|
warn: () => {},
|
||||||
|
error: () => {},
|
||||||
|
fatal: () => {},
|
||||||
|
};
|
||||||
|
|
||||||
|
function makeV1Key(key) {
|
||||||
|
const keyPrefix = key.includes(VID_SEP) ?
|
||||||
|
DbPrefixes.Version : DbPrefixes.Master;
|
||||||
|
return `${keyPrefix}${key}`;
|
||||||
|
}
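A quick illustration of what makeV1Key produces (the prefix strings come from DbPrefixes and are not spelled out here):

// Keys without the version id separator get the Master prefix,
// keys containing VID_SEP get the Version prefix.
const masterListingKey = makeV1Key('key');                    // DbPrefixes.Master + 'key'
const versionListingKey = makeV1Key(`key${VID_SEP}version1`); // DbPrefixes.Version + 'key' + VID_SEP + 'version1'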
|
||||||
|
|
||||||
|
describe('DelimiterNonCurrent', () => {
|
||||||
|
it('should accept entry starting with prefix', () => {
|
||||||
|
const delimiter = new DelimiterNonCurrent({ prefix: 'prefix' }, fakeLogger, 'v1');
|
||||||
|
|
||||||
|
const listingKey = makeV1Key('prefix1');
|
||||||
|
assert.strictEqual(delimiter.filter({ key: listingKey, value: '' }), FILTER_ACCEPT);
|
||||||
|
|
||||||
|
assert.deepStrictEqual(delimiter.result(), EmptyResult);
|
||||||
|
});
|
||||||
|
|
||||||
|
it('should skip entry not starting with prefix', () => {
|
||||||
|
const delimiter = new DelimiterNonCurrent({ prefix: 'prefix' }, fakeLogger, 'v1');
|
||||||
|
|
||||||
|
const listingKey = makeV1Key('noprefix');
|
||||||
|
assert.strictEqual(delimiter.filter({ key: listingKey, value: '' }), FILTER_SKIP);
|
||||||
|
|
||||||
|
assert.deepStrictEqual(delimiter.result(), EmptyResult);
|
||||||
|
});
|
||||||
|
|
||||||
|
it('should accept a version and return an empty content', () => {
|
||||||
|
const delimiter = new DelimiterNonCurrent({ }, fakeLogger, 'v1');
|
||||||
|
|
||||||
|
const masterKey = 'key';
|
||||||
|
|
||||||
|
const versionId1 = 'version1';
|
||||||
|
const versionKey1 = `${masterKey}${VID_SEP}${versionId1}`;
|
||||||
|
const date1 = '1970-01-01T00:00:00.001Z';
|
||||||
|
const value1 = `{"versionId":"${versionId1}", "last-modified": "${date1}"}`;
|
||||||
|
|
||||||
|
assert.strictEqual(delimiter.filter({
|
||||||
|
key: makeV1Key(versionKey1),
|
||||||
|
value: value1,
|
||||||
|
}), FILTER_ACCEPT);
|
||||||
|
|
||||||
|
assert.deepStrictEqual(delimiter.result(), EmptyResult);
|
||||||
|
});
|
||||||
|
|
||||||
|
it('should accept two versions and return the non current version', () => {
|
||||||
|
const delimiter = new DelimiterNonCurrent({ }, fakeLogger, 'v1');
|
||||||
|
|
||||||
|
const masterKey = 'key';
|
||||||
|
|
||||||
|
// filter first version
|
||||||
|
const versionId1 = 'version1';
|
||||||
|
const versionKey1 = `${masterKey}${VID_SEP}${versionId1}`;
|
||||||
|
const date1 = '1970-01-01T00:00:00.002Z';
|
||||||
|
const value1 = `{"versionId":"${versionId1}", "last-modified": "${date1}"}`;
|
||||||
|
|
||||||
|
assert.strictEqual(delimiter.filter({
|
||||||
|
key: makeV1Key(versionKey1),
|
||||||
|
value: value1,
|
||||||
|
}), FILTER_ACCEPT);
|
||||||
|
|
||||||
|
// filter second version
|
||||||
|
const versionId2 = 'version2';
|
||||||
|
const versionKey2 = `${masterKey}${VID_SEP}${versionId2}`;
|
||||||
|
const date2 = '1970-01-01T00:00:00.001Z';
|
||||||
|
const value2 = `{"versionId":"${versionId2}", "last-modified": "${date2}"}`;
|
||||||
|
|
||||||
|
assert.strictEqual(delimiter.filter({
|
||||||
|
key: makeV1Key(versionKey2),
|
||||||
|
value: value2,
|
||||||
|
}), FILTER_ACCEPT);
|
||||||
|
|
||||||
|
const expectedResult = {
|
||||||
|
Contents: [
|
||||||
|
{
|
||||||
|
key: masterKey,
|
||||||
|
value: `{"versionId":"${versionId2}","last-modified":"${date2}","staleDate":"${date1}"}`,
|
||||||
|
},
|
||||||
|
],
|
||||||
|
IsTruncated: false,
|
||||||
|
};
|
||||||
|
|
||||||
|
assert.deepStrictEqual(delimiter.result(), expectedResult);
|
||||||
|
});
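The staleDate bookkeeping checked above is worth spelling out (a small self-contained recap, not part of the diff): the listed non-current entry is version2, and its staleDate is version1's last-modified date, i.e. the moment version2 stopped being current.

const listedValue = JSON.parse(
    '{"versionId":"version2","last-modified":"1970-01-01T00:00:00.001Z","staleDate":"1970-01-01T00:00:00.002Z"}');
// version2 became non-current when version1 (last-modified .002Z) was written.
assert.strictEqual(listedValue.staleDate, '1970-01-01T00:00:00.002Z');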
|
||||||
|
|
||||||
|
it('should accept three versions and return the non current version whose stale date is before beforeDate', () => {
|
||||||
|
const beforeDate = '1970-01-01T00:00:00.002Z';
|
||||||
|
const delimiter = new DelimiterNonCurrent({ beforeDate }, fakeLogger, 'v1');
|
||||||
|
|
||||||
|
const masterKey = 'key';
|
||||||
|
|
||||||
|
// filter first version
|
||||||
|
const versionId1 = 'version1';
|
||||||
|
const versionKey1 = `${masterKey}${VID_SEP}${versionId1}`;
|
||||||
|
const date1 = beforeDate;
|
||||||
|
const value1 = `{"versionId":"${versionId1}", "last-modified": "${date1}"}`;
|
||||||
|
|
||||||
|
assert.strictEqual(delimiter.filter({
|
||||||
|
key: makeV1Key(versionKey1),
|
||||||
|
value: value1,
|
||||||
|
}), FILTER_ACCEPT);
|
||||||
|
|
||||||
|
// filter second version
|
||||||
|
const versionId2 = 'version2';
|
||||||
|
const versionKey2 = `${masterKey}${VID_SEP}${versionId2}`;
|
||||||
|
const date2 = '1970-01-01T00:00:00.001Z';
|
||||||
|
const value2 = `{"versionId":"${versionId2}", "last-modified": "${date2}"}`;
|
||||||
|
|
||||||
|
assert.strictEqual(delimiter.filter({
|
||||||
|
key: makeV1Key(versionKey2),
|
||||||
|
value: value2,
|
||||||
|
}), FILTER_ACCEPT);
|
||||||
|
|
||||||
|
// filter third version
|
||||||
|
const versionId3 = 'version3';
|
||||||
|
const versionKey3 = `${masterKey}${VID_SEP}${versionId3}`;
|
||||||
|
const date3 = '1970-01-01T00:00:00.000Z';
|
||||||
|
const value3 = `{"versionId":"${versionId3}", "last-modified": "${date3}"}`;
|
||||||
|
|
||||||
|
assert.strictEqual(delimiter.filter({
|
||||||
|
key: makeV1Key(versionKey3),
|
||||||
|
value: value3,
|
||||||
|
}), FILTER_ACCEPT);
|
||||||
|
|
||||||
|
const expectedResult = {
|
||||||
|
Contents: [
|
||||||
|
{
|
||||||
|
key: masterKey,
|
||||||
|
value: `{"versionId":"${versionId3}","last-modified":"${date3}","staleDate":"${date2}"}`,
|
||||||
|
},
|
||||||
|
],
|
||||||
|
IsTruncated: false,
|
||||||
|
};
|
||||||
|
|
||||||
|
assert.deepStrictEqual(delimiter.result(), expectedResult);
|
||||||
|
});
|
||||||
|
|
||||||
|
it('should accept one delete marker and one version and return the non current version', () => {
|
||||||
|
const delimiter = new DelimiterNonCurrent({ }, fakeLogger, 'v1');
|
||||||
|
|
||||||
|
|
||||||
|
const masterKey = 'key';
|
||||||
|
|
||||||
|
// filter delete marker
|
||||||
|
const versionId1 = 'version1';
|
||||||
|
const versionKey1 = `${masterKey}${VID_SEP}${versionId1}`;
|
||||||
|
const date1 = '1970-01-01T00:00:00.002Z';
|
||||||
|
const value1 = `{"versionId":"${versionId1}", "last-modified": "${date1}", "isDeleteMarker": true}`;
|
||||||
|
|
||||||
|
assert.strictEqual(delimiter.filter({
|
||||||
|
key: makeV1Key(versionKey1),
|
||||||
|
value: value1,
|
||||||
|
}), FILTER_ACCEPT);
|
||||||
|
|
||||||
|
// filter second version
|
||||||
|
const versionId2 = 'version2';
|
||||||
|
const versionKey2 = `${masterKey}${VID_SEP}${versionId2}`;
|
||||||
|
const date2 = '1970-01-01T00:00:00.001Z';
|
||||||
|
const value2 = `{"versionId":"${versionId2}", "last-modified": "${date2}"}`;
|
||||||
|
|
||||||
|
assert.strictEqual(delimiter.filter({
|
||||||
|
key: makeV1Key(versionKey2),
|
||||||
|
value: value2,
|
||||||
|
}), FILTER_ACCEPT);
|
||||||
|
|
||||||
|
const expectedResult = {
|
||||||
|
Contents: [
|
||||||
|
{
|
||||||
|
key: masterKey,
|
||||||
|
value: `{"versionId":"${versionId2}","last-modified":"${date2}","staleDate":"${date1}"}`,
|
||||||
|
},
|
||||||
|
],
|
||||||
|
IsTruncated: false,
|
||||||
|
};
|
||||||
|
|
||||||
|
assert.deepStrictEqual(delimiter.result(), expectedResult);
|
||||||
|
});
|
||||||
|
|
||||||
|
it('should end filtering if max keys reached', () => {
|
||||||
|
const delimiter = new DelimiterNonCurrent({ maxKeys: 1 }, fakeLogger, 'v1');
|
||||||
|
|
||||||
|
const masterKey = 'key';
|
||||||
|
|
||||||
|
// filter delete marker
|
||||||
|
const versionId1 = 'version1';
|
||||||
|
const versionKey1 = `${masterKey}${VID_SEP}${versionId1}`;
|
||||||
|
const date1 = '1970-01-01T00:00:00.002Z';
|
||||||
|
const value1 = `{"versionId":"${versionId1}", "last-modified": "${date1}", "isDeleteMarker": true}`;
|
||||||
|
|
||||||
|
assert.strictEqual(delimiter.filter({
|
||||||
|
key: makeV1Key(versionKey1),
|
||||||
|
value: value1,
|
||||||
|
}), FILTER_ACCEPT);
|
||||||
|
|
||||||
|
// filter second version
|
||||||
|
const versionId2 = 'version2';
|
||||||
|
const versionKey2 = `${masterKey}${VID_SEP}${versionId2}`;
|
||||||
|
const date2 = '1970-01-01T00:00:00.001Z';
|
||||||
|
const value2 = `{"versionId":"${versionId2}", "last-modified": "${date2}"}`;
|
||||||
|
|
||||||
|
assert.strictEqual(delimiter.filter({
|
||||||
|
key: makeV1Key(versionKey2),
|
||||||
|
value: value2,
|
||||||
|
}), FILTER_ACCEPT);
|
||||||
|
|
||||||
|
// filter third version
|
||||||
|
const versionId3 = 'version3';
|
||||||
|
const versionKey3 = `${masterKey}${VID_SEP}${versionId3}`;
|
||||||
|
const date3 = '1970-01-01T00:00:00.000Z';
|
||||||
|
const value3 = `{"versionId":"${versionId3}", "last-modified": "${date3}"}`;
|
||||||
|
|
||||||
|
assert.strictEqual(delimiter.filter({
|
||||||
|
key: makeV1Key(versionKey3),
|
||||||
|
value: value3,
|
||||||
|
}), FILTER_END);
|
||||||
|
|
||||||
|
|
||||||
|
const expectedResult = {
|
||||||
|
Contents: [
|
||||||
|
{
|
||||||
|
key: masterKey,
|
||||||
|
value: `{"versionId":"${versionId2}","last-modified":"${date2}","staleDate":"${date1}"}`,
|
||||||
|
},
|
||||||
|
],
|
||||||
|
IsTruncated: true,
|
||||||
|
NextKeyMarker: masterKey,
|
||||||
|
NextVersionIdMarker: versionId2,
|
||||||
|
};
|
||||||
|
|
||||||
|
assert.deepStrictEqual(delimiter.result(), expectedResult);
|
||||||
|
});
|
||||||
|
|
||||||
|
it('should end filtering if delimiter timeout', () => {
|
||||||
|
const delimiter = new DelimiterNonCurrent({ }, fakeLogger, 'v1');
|
||||||
|
|
||||||
|
const masterKey = 'key';
|
||||||
|
|
||||||
|
// filter delete marker
|
||||||
|
const versionId1 = 'version1';
|
||||||
|
const versionKey1 = `${masterKey}${VID_SEP}${versionId1}`;
|
||||||
|
const date1 = '1970-01-01T00:00:00.002Z';
|
||||||
|
const value1 = `{"versionId":"${versionId1}", "last-modified": "${date1}", "isDeleteMarker": true}`;
|
||||||
|
|
||||||
|
assert.strictEqual(delimiter.filter({
|
||||||
|
key: makeV1Key(versionKey1),
|
||||||
|
value: value1,
|
||||||
|
}), FILTER_ACCEPT);
|
||||||
|
|
||||||
|
// filter second version
|
||||||
|
const versionId2 = 'version2';
|
||||||
|
const versionKey2 = `${masterKey}${VID_SEP}${versionId2}`;
|
||||||
|
const date2 = '1970-01-01T00:00:00.001Z';
|
||||||
|
const value2 = `{"versionId":"${versionId2}", "last-modified": "${date2}"}`;
|
||||||
|
|
||||||
|
assert.strictEqual(delimiter.filter({
|
||||||
|
key: makeV1Key(versionKey2),
|
||||||
|
value: value2,
|
||||||
|
}), FILTER_ACCEPT);
|
||||||
|
|
||||||
|
// force delimiter to timeout.
|
||||||
|
delimiter.start = Date.now() - (DELIMITER_TIMEOUT_MS + 1);
|
||||||
|
|
||||||
|
// filter third version
|
||||||
|
const versionId3 = 'version3';
|
||||||
|
const versionKey3 = `${masterKey}${VID_SEP}${versionId3}`;
|
||||||
|
const date3 = '1970-01-01T00:00:00.000Z';
|
||||||
|
const value3 = `{"versionId":"${versionId3}", "last-modified": "${date3}"}`;
|
||||||
|
|
||||||
|
assert.strictEqual(delimiter.filter({
|
||||||
|
key: makeV1Key(versionKey3),
|
||||||
|
value: value3,
|
||||||
|
}), FILTER_END);
|
||||||
|
|
||||||
|
|
||||||
|
const expectedResult = {
|
||||||
|
Contents: [
|
||||||
|
{
|
||||||
|
key: masterKey,
|
||||||
|
value: `{"versionId":"${versionId2}","last-modified":"${date2}","staleDate":"${date1}"}`,
|
||||||
|
},
|
||||||
|
],
|
||||||
|
IsTruncated: true,
|
||||||
|
NextKeyMarker: masterKey,
|
||||||
|
NextVersionIdMarker: versionId2,
|
||||||
|
};
|
||||||
|
|
||||||
|
assert.deepStrictEqual(delimiter.result(), expectedResult);
|
||||||
|
});
|
||||||
|
|
||||||
|
it('should end filtering if delimiter timeout with empty content', () => {
|
||||||
|
const delimiter = new DelimiterNonCurrent({ }, fakeLogger, 'v1');
|
||||||
|
|
||||||
|
// filter current version
|
||||||
|
const masterKey1 = 'key1';
|
||||||
|
const versionId1 = 'version1';
|
||||||
|
const versionKey1 = `${masterKey1}${VID_SEP}${versionId1}`;
|
||||||
|
const date1 = '1970-01-01T00:00:00.002Z';
|
||||||
|
const value1 = `{"versionId":"${versionId1}", "last-modified": "${date1}"}`;
|
||||||
|
|
||||||
|
assert.strictEqual(delimiter.filter({
|
||||||
|
key: makeV1Key(versionKey1),
|
||||||
|
value: value1,
|
||||||
|
}), FILTER_ACCEPT);
|
||||||
|
|
||||||
|
// filter current version
|
||||||
|
const masterKey2 = 'key2';
|
||||||
|
const versionId2 = 'version2';
|
||||||
|
const versionKey2 = `${masterKey2}${VID_SEP}${versionId2}`;
|
||||||
|
const date2 = '1970-01-01T00:00:00.001Z';
|
||||||
|
const value2 = `{"versionId":"${versionId2}", "last-modified": "${date2}"}`;
|
||||||
|
|
||||||
|
assert.strictEqual(delimiter.filter({
|
||||||
|
key: makeV1Key(versionKey2),
|
||||||
|
value: value2,
|
||||||
|
}), FILTER_ACCEPT);
|
||||||
|
|
||||||
|
// force delimiter to timeout.
|
||||||
|
delimiter.start = Date.now() - (DELIMITER_TIMEOUT_MS + 1);
|
||||||
|
|
||||||
|
// filter current version
|
||||||
|
const masterKey3 = 'key3';
|
||||||
|
const versionId3 = 'version3';
|
||||||
|
const versionKey3 = `${masterKey3}${VID_SEP}${versionId3}`;
|
||||||
|
const date3 = '1970-01-01T00:00:00.000Z';
|
||||||
|
const value3 = `{"versionId":"${versionId3}", "last-modified": "${date3}"}`;
|
||||||
|
|
||||||
|
assert.strictEqual(delimiter.filter({
|
||||||
|
key: makeV1Key(versionKey3),
|
||||||
|
value: value3,
|
||||||
|
}), FILTER_END);
|
||||||
|
|
||||||
|
const expectedResult = {
|
||||||
|
Contents: [],
|
||||||
|
IsTruncated: true,
|
||||||
|
NextKeyMarker: masterKey2,
|
||||||
|
NextVersionIdMarker: versionId2,
|
||||||
|
};
|
||||||
|
|
||||||
|
assert.deepStrictEqual(delimiter.result(), expectedResult);
|
||||||
|
});
|
||||||
|
});
|
|
@ -0,0 +1,378 @@
|
||||||
|
'use strict'; // eslint-disable-line strict
|
||||||
|
|
||||||
|
const assert = require('assert');
|
||||||
|
|
||||||
|
const DelimiterOrphanDeleteMarker =
|
||||||
|
require('../../../../lib/algos/list/delimiterOrphanDeleteMarker').DelimiterOrphanDeleteMarker;
|
||||||
|
const {
|
||||||
|
FILTER_ACCEPT,
|
||||||
|
FILTER_SKIP,
|
||||||
|
FILTER_END,
|
||||||
|
} = require('../../../../lib/algos/list/tools');
|
||||||
|
const VSConst =
|
||||||
|
require('../../../../lib/versioning/constants').VersioningConstants;
|
||||||
|
const { DbPrefixes } = VSConst;
|
||||||
|
|
||||||
|
// TODO: find an acceptable timeout value.
|
||||||
|
const DELIMITER_TIMEOUT_MS = 10 * 1000; // 10s
|
||||||
|
|
||||||
|
|
||||||
|
const VID_SEP = VSConst.VersionId.Separator;
|
||||||
|
const EmptyResult = {
|
||||||
|
Contents: [],
|
||||||
|
IsTruncated: false,
|
||||||
|
};
|
||||||
|
|
||||||
|
const fakeLogger = {
|
||||||
|
trace: () => {},
|
||||||
|
debug: () => {},
|
||||||
|
info: () => {},
|
||||||
|
warn: () => {},
|
||||||
|
error: () => {},
|
||||||
|
fatal: () => {},
|
||||||
|
};
|
||||||
|
|
||||||
|
function makeV1Key(key) {
|
||||||
|
const keyPrefix = key.includes(VID_SEP) ?
|
||||||
|
DbPrefixes.Version : DbPrefixes.Master;
|
||||||
|
return `${keyPrefix}${key}`;
|
||||||
|
}
|
||||||
|
|
||||||
|
describe('DelimiterOrphanDeleteMarker', () => {
|
||||||
|
it('should accept entry starting with prefix', () => {
|
||||||
|
const delimiter = new DelimiterOrphanDeleteMarker({ prefix: 'prefix' }, fakeLogger, 'v1');
|
||||||
|
|
||||||
|
const listingKey = makeV1Key('prefix1');
|
||||||
|
assert.strictEqual(delimiter.filter({ key: listingKey, value: '' }), FILTER_ACCEPT);
|
||||||
|
|
||||||
|
assert.deepStrictEqual(delimiter.result(), EmptyResult);
|
||||||
|
});
|
||||||
|
|
||||||
|
it('should skip entry not starting with prefix', () => {
|
||||||
|
const delimiter = new DelimiterOrphanDeleteMarker({ prefix: 'prefix' }, fakeLogger, 'v1');
|
||||||
|
|
||||||
|
const listingKey = makeV1Key('noprefix');
|
||||||
|
assert.strictEqual(delimiter.filter({ key: listingKey, value: '' }), FILTER_SKIP);
|
||||||
|
|
||||||
|
assert.deepStrictEqual(delimiter.result(), EmptyResult);
|
||||||
|
});
|
||||||
|
|
||||||
|
it('should accept a version and return an empty content', () => {
|
||||||
|
const delimiter = new DelimiterOrphanDeleteMarker({ }, fakeLogger, 'v1');
|
||||||
|
|
||||||
|
const masterKey = 'key';
|
||||||
|
|
||||||
|
const versionId1 = 'version1';
|
||||||
|
const versionKey1 = `${masterKey}${VID_SEP}${versionId1}`;
|
||||||
|
const date1 = '1970-01-01T00:00:00.001Z';
|
||||||
|
const value1 = `{"versionId":"${versionId1}","last-modified":"${date1}"}`;
|
||||||
|
|
||||||
|
assert.strictEqual(delimiter.filter({
|
||||||
|
key: makeV1Key(versionKey1),
|
||||||
|
value: value1,
|
||||||
|
}), FILTER_ACCEPT);
|
||||||
|
|
||||||
|
assert.deepStrictEqual(delimiter.result(), EmptyResult);
|
||||||
|
});
|
||||||
|
|
||||||
|
    it('should accept an orphan delete marker and return it from the content', () => {
        const delimiter = new DelimiterOrphanDeleteMarker({ }, fakeLogger, 'v1');

        const masterKey = 'key';

        const versionId1 = 'version1';
        const versionKey1 = `${masterKey}${VID_SEP}${versionId1}`;
        const date1 = '1970-01-01T00:00:00.001Z';
        const value1 = `{"versionId":"${versionId1}","last-modified":"${date1}","isDeleteMarker":true}`;

        assert.strictEqual(delimiter.filter({
            key: makeV1Key(versionKey1),
            value: value1,
        }), FILTER_ACCEPT);

        const expectedResult = {
            Contents: [
                {
                    key: masterKey,
                    value: value1,
                },
            ],
            IsTruncated: false,
        };

        assert.deepStrictEqual(delimiter.result(), expectedResult);
    });
    it('should accept two orphan delete markers and return them from the content', () => {
        const delimiter = new DelimiterOrphanDeleteMarker({ }, fakeLogger, 'v1');

        // filter the first orphan delete marker
        const masterKey1 = 'key1';
        const versionId1 = 'version1';
        const versionKey1 = `${masterKey1}${VID_SEP}${versionId1}`;
        const date1 = '1970-01-01T00:00:00.002Z';
        const value1 = `{"versionId":"${versionId1}","last-modified":"${date1}","isDeleteMarker":true}`;

        assert.strictEqual(delimiter.filter({
            key: makeV1Key(versionKey1),
            value: value1,
        }), FILTER_ACCEPT);

        // filter the second orphan delete marker
        const masterKey2 = 'key2';
        const versionId2 = 'version2';
        const versionKey2 = `${masterKey2}${VID_SEP}${versionId2}`;
        const date2 = '1970-01-01T00:00:00.001Z';
        const value2 = `{"versionId":"${versionId2}","last-modified":"${date2}","isDeleteMarker":true}`;

        assert.strictEqual(delimiter.filter({
            key: makeV1Key(versionKey2),
            value: value2,
        }), FILTER_ACCEPT);

        const expectedResult = {
            Contents: [
                {
                    key: masterKey1,
                    value: value1,
                },
                {
                    key: masterKey2,
                    value: value2,
                },
            ],
            IsTruncated: false,
        };

        assert.deepStrictEqual(delimiter.result(), expectedResult);
    });
    it('should accept two orphan delete markers and return truncated content with one', () => {
        const delimiter = new DelimiterOrphanDeleteMarker({ maxKeys: 1 }, fakeLogger, 'v1');

        // filter the first orphan delete marker
        const masterKey1 = 'key1';
        const versionId1 = 'version1';
        const versionKey1 = `${masterKey1}${VID_SEP}${versionId1}`;
        const date1 = '1970-01-01T00:00:00.002Z';
        const value1 = `{"versionId":"${versionId1}","last-modified":"${date1}","isDeleteMarker":true}`;

        assert.strictEqual(delimiter.filter({
            key: makeV1Key(versionKey1),
            value: value1,
        }), FILTER_ACCEPT);

        // filter the second orphan delete marker
        const masterKey2 = 'key2';
        const versionId2 = 'version2';
        const versionKey2 = `${masterKey2}${VID_SEP}${versionId2}`;
        const date2 = '1970-01-01T00:00:00.001Z';
        const value2 = `{"versionId":"${versionId2}","last-modified":"${date2}","isDeleteMarker":true}`;

        assert.strictEqual(delimiter.filter({
            key: makeV1Key(versionKey2),
            value: value2,
        }), FILTER_ACCEPT);

        const expectedResult = {
            Contents: [
                {
                    key: masterKey1,
                    value: value1,
                },
            ],
            NextMarker: masterKey1,
            IsTruncated: true,
        };

        assert.deepStrictEqual(delimiter.result(), expectedResult);
    });
    it('should accept two orphan delete markers and return the one created before the beforeDate', () => {
        const date1 = '1970-01-01T00:00:00.002Z';
        const delimiter = new DelimiterOrphanDeleteMarker({ beforeDate: date1 }, fakeLogger, 'v1');

        // filter the first orphan delete marker
        const masterKey1 = 'key1';
        const versionId1 = 'version1';
        const versionKey1 = `${masterKey1}${VID_SEP}${versionId1}`;
        const value1 = `{"versionId":"${versionId1}","last-modified":"${date1}","isDeleteMarker":true}`;

        assert.strictEqual(delimiter.filter({
            key: makeV1Key(versionKey1),
            value: value1,
        }), FILTER_ACCEPT);

        // filter the second orphan delete marker
        const masterKey2 = 'key2';
        const versionId2 = 'version2';
        const versionKey2 = `${masterKey2}${VID_SEP}${versionId2}`;
        const date2 = '1970-01-01T00:00:00.001Z';
        const value2 = `{"versionId":"${versionId2}","last-modified":"${date2}","isDeleteMarker":true}`;

        assert.strictEqual(delimiter.filter({
            key: makeV1Key(versionKey2),
            value: value2,
        }), FILTER_ACCEPT);

        const expectedResult = {
            Contents: [
                {
                    key: masterKey2,
                    value: value2,
                },
            ],
            IsTruncated: false,
        };

        assert.deepStrictEqual(delimiter.result(), expectedResult);
    });
    it('should end filtering if max keys reached', () => {
        const delimiter = new DelimiterOrphanDeleteMarker({ maxKeys: 1 }, fakeLogger, 'v1');

        // filter the first orphan delete marker
        const masterKey1 = 'key1';
        const versionId1 = 'version1';
        const versionKey1 = `${masterKey1}${VID_SEP}${versionId1}`;
        const date1 = '1970-01-01T00:00:00.002Z';
        const value1 = `{"versionId":"${versionId1}","last-modified":"${date1}","isDeleteMarker":true}`;

        assert.strictEqual(delimiter.filter({
            key: makeV1Key(versionKey1),
            value: value1,
        }), FILTER_ACCEPT);

        // filter the second orphan delete marker
        const masterKey2 = 'key2';
        const versionId2 = 'version2';
        const versionKey2 = `${masterKey2}${VID_SEP}${versionId2}`;
        const date2 = '1970-01-01T00:00:00.001Z';
        const value2 = `{"versionId":"${versionId2}","last-modified":"${date2}","isDeleteMarker":true}`;

        assert.strictEqual(delimiter.filter({
            key: makeV1Key(versionKey2),
            value: value2,
        }), FILTER_ACCEPT);

        // filter the third orphan delete marker
        const masterKey3 = 'key3';
        const versionId3 = 'version3';
        const versionKey3 = `${masterKey3}${VID_SEP}${versionId3}`;
        const date3 = '1970-01-01T00:00:00.000Z';
        const value3 = `{"versionId":"${versionId3}","last-modified":"${date3}","isDeleteMarker":true}`;

        assert.strictEqual(delimiter.filter({
            key: makeV1Key(versionKey3),
            value: value3,
        }), FILTER_END);

        const expectedResult = {
            Contents: [
                {
                    key: masterKey1,
                    value: value1,
                },
            ],
            NextMarker: masterKey1,
            IsTruncated: true,
        };

        assert.deepStrictEqual(delimiter.result(), expectedResult);
    });
    it('should end filtering if delimiter timeout', () => {
        const delimiter = new DelimiterOrphanDeleteMarker({ }, fakeLogger, 'v1');

        // filter the first orphan delete marker
        const masterKey1 = 'key1';
        const versionId1 = 'version1';
        const versionKey1 = `${masterKey1}${VID_SEP}${versionId1}`;
        const date1 = '1970-01-01T00:00:00.002Z';
        const value1 = `{"versionId":"${versionId1}","last-modified":"${date1}","isDeleteMarker":true}`;

        assert.strictEqual(delimiter.filter({
            key: makeV1Key(versionKey1),
            value: value1,
        }), FILTER_ACCEPT);

        // force delimiter to timeout.
        delimiter.start = Date.now() - (DELIMITER_TIMEOUT_MS + 1);

        // filter the second orphan delete marker
        const masterKey2 = 'key2';
        const versionId2 = 'version2';
        const versionKey2 = `${masterKey2}${VID_SEP}${versionId2}`;
        const date2 = '1970-01-01T00:00:00.001Z';
        const value2 = `{"versionId":"${versionId2}","last-modified":"${date2}","isDeleteMarker":true}`;

        assert.strictEqual(delimiter.filter({
            key: makeV1Key(versionKey2),
            value: value2,
        }), FILTER_END);

        const expectedResult = {
            Contents: [
                {
                    key: masterKey1,
                    value: value1,
                },
            ],
            NextMarker: masterKey1,
            IsTruncated: true,
        };

        assert.deepStrictEqual(delimiter.result(), expectedResult);
    });
    it('should end filtering if delimiter timeout with empty content', () => {
        const delimiter = new DelimiterOrphanDeleteMarker({ }, fakeLogger, 'v1');

        // filter the first version (not a delete marker)
        const masterKey1 = 'key1';
        const versionId1 = 'version1';
        const versionKey1 = `${masterKey1}${VID_SEP}${versionId1}`;
        const date1 = '1970-01-01T00:00:00.002Z';
        const value1 = `{"versionId":"${versionId1}","last-modified":"${date1}"}`;

        assert.strictEqual(delimiter.filter({
            key: makeV1Key(versionKey1),
            value: value1,
        }), FILTER_ACCEPT);

        // filter the second version (not a delete marker)
        const masterKey2 = 'key2';
        const versionId2 = 'version2';
        const versionKey2 = `${masterKey2}${VID_SEP}${versionId2}`;
        const date2 = '1970-01-01T00:00:00.001Z';
        const value2 = `{"versionId":"${versionId2}","last-modified":"${date2}"}`;

        assert.strictEqual(delimiter.filter({
            key: makeV1Key(versionKey2),
            value: value2,
        }), FILTER_ACCEPT);

        // force delimiter to timeout.
        delimiter.start = Date.now() - (DELIMITER_TIMEOUT_MS + 1);

        // filter the third orphan delete marker
        const masterKey3 = 'key3';
        const versionId3 = 'version3';
        const versionKey3 = `${masterKey3}${VID_SEP}${versionId3}`;
        const date3 = '1970-01-01T00:00:00.000Z';
        const value3 = `{"versionId":"${versionId3}","last-modified":"${date3}","isDeleteMarker":true}`;

        assert.strictEqual(delimiter.filter({
            key: makeV1Key(versionKey3),
            value: value3,
        }), FILTER_END);

        const expectedResult = {
            Contents: [],
            NextMarker: masterKey2,
            IsTruncated: true,
        };

        assert.deepStrictEqual(delimiter.result(), expectedResult);
    });
});