Compare commits
2 Commits
developmen ... improvemen

| Author | SHA1 | Date |
|---|---|---|
| Nicolas Humbert | 7c3e189df9 | |
| Nicolas Humbert | c094de0f51 | |
@ -0,0 +1,46 @@
const { Delimiter } = require('./delimiter');
import { ResultObject } from './types';

/**
 * Handle object listing with parameters. This extends the base class Delimiter
 * to return the master/current versions.
 */
class DelimiterCurrent extends Delimiter {
    /**
     * Delimiter listing of current versions.
     * @param {Object} parameters - listing parameters
     * @param {String} parameters.beforeDate - limit the response to keys older than beforeDate
     * @param {RequestLogger} logger - The logger of the request
     * @param {String} [vFormat] - versioning key format
     */
    constructor(parameters, logger, vFormat) {
        super(parameters, logger, vFormat);

        this.beforeDate = parameters.beforeDate;
    }

    genMDParamsV1() {
        const params = super.genMDParamsV1();

        if (this.beforeDate) {
            params.lastModified = {
                lt: this.beforeDate,
            };
        }
        return params;
    }

    result(): ResultObject {
        const result: ResultObject = {
            Contents: this.Contents,
            IsTruncated: this.IsTruncated,
        };

        if (this.IsTruncated) {
            result.NextKeyMarker = this.NextMarker;
        }

        return result;
    }
}
module.exports = { DelimiterCurrent };
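As context for the new class above, a minimal usage sketch (not part of the change): it relies only on the constructor and genMDParamsV1() shown in the diff, and assumes a werelogs request logger plus the v1 bucket key format identifier used elsewhere in the library.

```js
// Hypothetical caller of DelimiterCurrent.
const { DelimiterCurrent } = require('./delimiterCurrent');

const extension = new DelimiterCurrent(
    { beforeDate: '2023-01-01T00:00:00.000Z', maxKeys: 1000 },
    logger,   // assumed: a werelogs RequestLogger
    'v1',     // assumed: the v1 versioning key format identifier
);

const mdParams = extension.genMDParamsV1();
// mdParams carries the master-key range from the Delimiter base class plus
// the upper bound added above: lastModified: { lt: '2023-01-01T00:00:00.000Z' }
```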
@ -0,0 +1,201 @@
|
|||
'use strict'; // eslint-disable-line strict
|
||||
const Delimiter = require('./delimiter').Delimiter;
|
||||
const VSConst = require('../../versioning/constants').VersioningConstants;
|
||||
const { inc, FILTER_ACCEPT, FILTER_END, SKIP_NONE } = require('./tools');
|
||||
const VID_SEP = VSConst.VersionId.Separator;
|
||||
const { DbPrefixes } = VSConst;
|
||||
|
||||
// TODO: find an acceptable timeout value.
|
||||
const DELIMITER_TIMEOUT_MS = 10 * 1000; // 10s
|
||||
const TRIM_METADATA_MIN_BLOB_SIZE = 10000;
|
||||
|
||||
/**
|
||||
* Handle object listing with parameters. This extends the base class Delimiter
|
||||
* to return the raw non-current version objects.
|
||||
*/
|
||||
class DelimiterNonCurrent extends Delimiter {
|
||||
/**
|
||||
* Delimiter listing of non-current versions.
|
||||
* @param {Object} parameters - listing parameters
|
||||
* @param {String} parameters.versionIdMarker - version id marker
|
||||
* @param {String} parameters.beforeDate - limit the response to keys with stale date older than beforeDate
|
||||
* “stale date” is the date when a version becomes non-current.
|
||||
* @param {String} parameters.keyMarker - key marker
|
||||
* @param {RequestLogger} logger - The logger of the request
|
||||
* @param {String} [vFormat] - versioning key format
|
||||
*/
|
||||
constructor(parameters, logger, vFormat) {
|
||||
super(parameters, logger, vFormat);
|
||||
|
||||
this.versionIdMarker = parameters.versionIdMarker;
|
||||
this.beforeDate = parameters.beforeDate;
|
||||
this.keyMarker = parameters.keyMarker;
|
||||
this.NextKeyMarker = null;
|
||||
|
||||
this.skipping = this.skippingV1;
|
||||
this.genMDParams = this.genMDParamsV1;
|
||||
|
||||
this.keyName = null;
|
||||
this.staleDate = null;
|
||||
|
||||
// used for monitoring
|
||||
this.evaluatedKeys = 0;
|
||||
}
|
||||
|
||||
skippingV1() {
|
||||
return SKIP_NONE;
|
||||
}
|
||||
|
||||
genMDParamsV1() {
|
||||
const params = {
|
||||
gte: DbPrefixes.Version,
|
||||
lt: inc(DbPrefixes.Version),
|
||||
};
|
||||
|
||||
if (this.prefix) {
|
||||
params.gte = `${DbPrefixes.Version}${this.prefix}`;
|
||||
params.lt = `${DbPrefixes.Version}${inc(this.prefix)}`;
|
||||
}
|
||||
|
||||
if (this.keyMarker && `${DbPrefixes.Version}${this.keyMarker}` >= params.gte) {
|
||||
if (this.versionIdMarker) {
|
||||
// versionIdMarker should always come with keyMarker
|
||||
// but may not be the other way around
|
||||
// NOTE: gte is used to evaluate the "previous" versions if a versionId marker is specified.
|
||||
// This "previous"/"already evaluated" version will be used to retrieve the stale date and
|
||||
// skipped to not evaluate the same key twice in the addContents() method.
|
||||
params.gte = DbPrefixes.Version
|
||||
+ this.keyMarker
|
||||
+ VID_SEP
|
||||
+ this.versionIdMarker;
|
||||
} else {
|
||||
delete params.gte;
|
||||
params.gt = DbPrefixes.Version + inc(this.keyMarker + VID_SEP);
|
||||
}
|
||||
}
|
||||
|
||||
this.start = Date.now();
|
||||
|
||||
return params;
|
||||
}
|
||||
|
||||
getLastModified(value) {
|
||||
let lastModified;
|
||||
try {
|
||||
const v = JSON.parse(value);
|
||||
lastModified = v['last-modified'];
|
||||
} catch (e) {
|
||||
this.logger.warn('could not parse Object Metadata while listing',
|
||||
{
|
||||
method: 'getLastModified',
|
||||
err: e.toString(),
|
||||
});
|
||||
}
|
||||
return lastModified;
|
||||
}
|
||||
|
||||
/**
|
||||
* NOTE: Each version of a specific key is sorted from the youngest to the oldest
|
||||
* thanks to the way version ids are generated.
|
||||
* DESCRIPTION: For a given key, the youngest version is skipped since it represents the current version.
|
||||
* The current last-modified date is kept in memory and used as a "stale date" for the following version.
|
||||
* The following version is pushed only if the "stale date" (picked up from the previous version)
|
||||
* is available (JSON.parse has not failed) and, if the "beforeDate" argument is specified,
|
||||
* the "stale date" is older than the "beforeDate".
|
||||
* The in-memory "stale date" is then updated with the version's last-modified date to be used for
|
||||
* the following version.
|
||||
* The process stops and returns the available results if either:
|
||||
* - no more metadata key is left to be processed
|
||||
* - the listing reaches the maximum number of keys to be returned
|
||||
* - the internal timeout is reached
|
||||
* @param {String} keyVersionSuffix - The key to add
|
||||
* @param {String} value - The value of the key
|
||||
* @return {number} - indicates if iteration should continue
|
||||
*/
|
||||
addContents(keyVersionSuffix, value) {
|
||||
if (this._reachedMaxKeys()) {
|
||||
return FILTER_END;
|
||||
}
|
||||
|
||||
if (this.start && Date.now() - this.start > DELIMITER_TIMEOUT_MS) {
|
||||
this.IsTruncated = true;
|
||||
this.logger.info('listing stopped after expected internal timeout',
|
||||
{
|
||||
timeoutMs: DELIMITER_TIMEOUT_MS,
|
||||
evaluatedKeys: this.evaluatedKeys,
|
||||
});
|
||||
return FILTER_END;
|
||||
}
|
||||
++this.evaluatedKeys;
|
||||
|
||||
const versionIdIndex = keyVersionSuffix.indexOf(VID_SEP);
|
||||
const key = keyVersionSuffix.slice(0, versionIdIndex);
|
||||
const versionId = keyVersionSuffix.slice(versionIdIndex + 1);
|
||||
|
||||
this.NextKeyMarker = key;
|
||||
this.NextVersionIdMarker = versionId;
|
||||
|
||||
// For a given key, the youngest version is skipped since it represents the current version.
|
||||
const isYoungestVersion = key !== this.keyName;
|
||||
|
||||
if (isYoungestVersion) {
|
||||
this.keyName = key;
|
||||
// The current last-modified date is kept in memory and used as a "stale date" for the following version.
|
||||
this.staleDate = this.getLastModified(value);
|
||||
return FILTER_ACCEPT;
|
||||
}
|
||||
|
||||
// The following version is pushed only if the "stale date" (picked up from the previous version)
|
||||
// is available (JSON.parse has not failed) and, if the "beforeDate" argument is specified,
|
||||
// the "stale date" is older than the "beforeDate".
|
||||
if (this.staleDate && (!this.beforeDate || this.staleDate < this.beforeDate)) {
|
||||
this.Contents.push({ key, value: this.trimMetadataAddStaleDate(value, this.staleDate) });
|
||||
++this.keys;
|
||||
}
|
||||
|
||||
// The in-memory "stale date" is then updated with the version's last-modified date to be used for
|
||||
// the following version.
|
||||
this.staleDate = this.getLastModified(value);
|
||||
|
||||
return FILTER_ACCEPT;
|
||||
}
|
||||
|
||||
trimMetadataAddStaleDate(value, staleDate) {
|
||||
let ret = undefined;
|
||||
try {
|
||||
ret = JSON.parse(value);
|
||||
ret.staleDate = staleDate;
|
||||
if (value.length >= TRIM_METADATA_MIN_BLOB_SIZE) {
|
||||
delete ret.location;
|
||||
}
|
||||
ret = JSON.stringify(ret);
|
||||
} catch (e) {
|
||||
// Prefer returning unfiltered data rather than
|
||||
// stopping the service in case of parsing failure.
|
||||
// The risk of this approach is a potential
|
||||
// reproduction of MD-692, where too much memory is
|
||||
// used by repd.
|
||||
this.logger.warn('could not parse Object Metadata while listing',
|
||||
{
|
||||
method: 'trimMetadataAddStaleDate',
|
||||
err: e.toString(),
|
||||
});
|
||||
}
|
||||
return ret || value;
|
||||
}
|
||||
|
||||
result() {
|
||||
const result = {
|
||||
Contents: this.Contents,
|
||||
IsTruncated: this.IsTruncated,
|
||||
};
|
||||
|
||||
if (this.IsTruncated) {
|
||||
result.NextKeyMarker = this.NextKeyMarker;
|
||||
result.NextVersionIdMarker = this.NextVersionIdMarker;
|
||||
}
|
||||
|
||||
return result;
|
||||
}
|
||||
}
|
||||
module.exports = { DelimiterNonCurrent };
|
|
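The addContents() walk above relies on versions arriving youngest first. The following standalone sketch (an illustration only, not the class itself) shows how that ordering yields one "stale date" per non-current version, matching the behaviour described in the doc comment.

```js
// Standalone illustration of the stale-date rule used by addContents():
// versions of one key are assumed to arrive youngest first, as the
// version-id encoding guarantees.
function nonCurrentWithStaleDates(versions, beforeDate) {
    const nonCurrent = [];
    let staleDate; // last-modified of the previously seen (younger) version
    versions.forEach(v => {
        if (staleDate && (!beforeDate || staleDate < beforeDate)) {
            // v became non-current when the younger version was written
            nonCurrent.push({ key: v.key, versionId: v.versionId, staleDate });
        }
        staleDate = v.lastModified;
    });
    return nonCurrent;
}

// Three versions of one key, youngest first: vid2 (current) is skipped,
// vid1 gets staleDate .003Z and vid0 gets staleDate .002Z.
nonCurrentWithStaleDates([
    { key: 'obj', versionId: 'vid2', lastModified: '1970-01-01T00:00:00.003Z' },
    { key: 'obj', versionId: 'vid1', lastModified: '1970-01-01T00:00:00.002Z' },
    { key: 'obj', versionId: 'vid0', lastModified: '1970-01-01T00:00:00.001Z' },
]);
```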
@ -0,0 +1,179 @@
|
|||
'use strict'; // eslint-disable-line strict
|
||||
const Delimiter = require('./delimiter').Delimiter;
|
||||
const VSConst = require('../../versioning/constants').VersioningConstants;
|
||||
const { inc, FILTER_ACCEPT, FILTER_END, SKIP_NONE } = require('./tools');
|
||||
const VID_SEP = VSConst.VersionId.Separator;
|
||||
const { DbPrefixes } = VSConst;
|
||||
|
||||
const DELIMITER_TIMEOUT_MS = 10 * 1000; // 10s
|
||||
|
||||
/**
|
||||
* Handle object listing with parameters. This extends the base class Delimiter
|
||||
* to return the orphan delete markers. Orphan delete markers are also
|
||||
* referred to as expired object delete markers.
|
||||
* They are delete markers with zero noncurrent versions.
|
||||
*/
|
||||
class DelimiterOrphan extends Delimiter {
|
||||
/**
|
||||
* Delimiter listing of orphan delete markers.
|
||||
* @param {Object} parameters - listing parameters
|
||||
* @param {String} parameters.beforeDate - limit the response to keys older than beforeDate
|
||||
* @param {String} parameters.keyMarker - key marker
|
||||
* @param {RequestLogger} logger - The logger of the request
|
||||
* @param {String} [vFormat] - versioning key format
|
||||
*/
|
||||
constructor(parameters, logger, vFormat) {
|
||||
super(parameters, logger, vFormat);
|
||||
|
||||
this.beforeDate = parameters.beforeDate;
|
||||
this.keyMarker = parameters.keyMarker;
|
||||
this.NextKeyMarker = null;
|
||||
|
||||
this.skipping = this.skippingV1;
|
||||
this.genMDParams = this.genMDParamsV1;
|
||||
|
||||
this.keyName = null;
|
||||
this.staleDate = null;
|
||||
|
||||
// used for monitoring
|
||||
this.evaluatedKeys = 0;
|
||||
}
|
||||
|
||||
skippingV1() {
|
||||
return SKIP_NONE;
|
||||
}
|
||||
|
||||
_reachedMaxKeys() {
|
||||
if (this.keys >= this.maxKeys) {
|
||||
return true;
|
||||
}
|
||||
return false;
|
||||
}
|
||||
|
||||
genMDParamsV1() {
|
||||
const params = {
|
||||
gte: DbPrefixes.Version,
|
||||
lt: inc(DbPrefixes.Version),
|
||||
};
|
||||
|
||||
if (this.prefix) {
|
||||
params.gte = `${DbPrefixes.Version}${this.prefix}`;
|
||||
params.lt = `${DbPrefixes.Version}${inc(this.prefix)}`;
|
||||
}
|
||||
|
||||
if (this.keyMarker && `${DbPrefixes.Version}${this.keyMarker}` >= params.gte) {
|
||||
delete params.gte;
|
||||
params.gt = DbPrefixes.Version
|
||||
+ inc(this.keyMarker + VID_SEP);
|
||||
}
|
||||
|
||||
this.start = Date.now();
|
||||
|
||||
return params;
|
||||
}
|
||||
|
||||
_addOrphan() {
|
||||
if (this.value) {
|
||||
let lastModified;
|
||||
let isDeleteMarker = false;
|
||||
try {
|
||||
const v = JSON.parse(this.value);
|
||||
|
||||
lastModified = v['last-modified'];
|
||||
isDeleteMarker = v.isDeleteMarker;
|
||||
} catch (e) {
|
||||
this.logger.warn('could not parse Object Metadata while listing',
|
||||
{
|
||||
method: 'getLastModified',
|
||||
err: e.toString(),
|
||||
});
|
||||
}
|
||||
// We then check if the orphan version is a delete marker and if it is older than the "beforeDate"
|
||||
if ((!this.beforeDate || (lastModified && lastModified < this.beforeDate)) && isDeleteMarker) {
|
||||
this.Contents.push({ key: this.keyName, value: this.trimMetadata(this.value) });
|
||||
this.NextKeyMarker = this.keyName;
|
||||
++this.keys;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* NOTE: Each version of a specific key is sorted from the youngest to the oldest
|
||||
* thanks to the way version ids are generated.
|
||||
* DESCRIPTION: For a given key, the youngest version is kept in memory since it is the current version.
|
||||
* If the following version references a new key, it means that the previous one was an orphan version.
|
||||
* We then check if the orphan version is a delete marker and if it is older than the "beforeDate"
|
||||
* The process stops and returns the available results if either:
|
||||
* - no more metadata key is left to be processed
|
||||
* - the listing reaches the maximum number of keys to be returned
|
||||
* - the internal timeout is reached
|
||||
* NOTE: we cannot leverage MongoDB to list keys older than "beforeDate"
|
||||
* because we would then be unable to determine whether the delete marker is orphaned.
|
||||
* @param {String} keyVersionSuffix - The key with version id as a suffix.
|
||||
* @param {String} value - The value of the key
|
||||
* @return {number} - indicates if iteration should continue
|
||||
*/
|
||||
addContents(keyVersionSuffix, value) {
|
||||
if (this._reachedMaxKeys()) {
|
||||
return FILTER_END;
|
||||
}
|
||||
|
||||
if (this.start && Date.now() - this.start > DELIMITER_TIMEOUT_MS) {
|
||||
this.IsTruncated = true;
|
||||
this.NextKeyMarker = this.keyName;
|
||||
|
||||
this.logger.info('listing stopped after expected internal timeout',
|
||||
{
|
||||
timeoutMs: DELIMITER_TIMEOUT_MS,
|
||||
evaluatedKeys: this.evaluatedKeys,
|
||||
});
|
||||
return FILTER_END;
|
||||
}
|
||||
++this.evaluatedKeys;
|
||||
|
||||
const versionIdIndex = keyVersionSuffix.indexOf(VID_SEP);
|
||||
// key without version suffix
|
||||
const key = keyVersionSuffix.slice(0, versionIdIndex);
|
||||
|
||||
|
||||
// For a given key, the youngest version is kept in memory since it represents the current version.
|
||||
if (key !== this.keyName) {
|
||||
// If the following version references a new key,
|
||||
// it means that the previous one (this.value) was an orphan version.
|
||||
this._addOrphan();
|
||||
this.keyName = key;
|
||||
this.value = value;
|
||||
|
||||
return FILTER_ACCEPT;
|
||||
}
|
||||
|
||||
this.keyName = key;
|
||||
this.value = null;
|
||||
|
||||
return FILTER_ACCEPT;
|
||||
}
|
||||
|
||||
result() {
|
||||
// The following check makes sure the last orphan delete marker is not forgotten.
|
||||
if (this.keys < this.maxKeys) {
|
||||
this._addOrphan();
|
||||
// The following makes sure that if maxKeys is reached, IsTruncated is set to true.
|
||||
// We moved the "isTruncated" from _reachedMaxKeys to make sure we take into account the last entity
|
||||
// if listing is truncated right before the last entity and the last entity is an orphan delete marker.
|
||||
} else {
|
||||
this.IsTruncated = this.maxKeys > 0;
|
||||
}
|
||||
|
||||
const result = {
|
||||
Contents: this.Contents,
|
||||
IsTruncated: this.IsTruncated,
|
||||
};
|
||||
|
||||
if (this.IsTruncated) {
|
||||
result.NextKeyMarker = this.NextKeyMarker;
|
||||
}
|
||||
|
||||
return result;
|
||||
}
|
||||
}
|
||||
module.exports = { DelimiterOrphan };
|
|
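For clarity, a self-contained sketch (not the class itself) of the orphan rule implemented by _addOrphan() and addContents() above: a delete marker is an orphan when it is the only remaining version of its key, optionally filtered by beforeDate.

```js
// Standalone illustration of orphan ("expired object") delete marker
// detection. `versions` is a flat list sorted by key, then youngest first
// within a key, mirroring the order the listing receives from metadata.
function listOrphanDeleteMarkers(versions, beforeDate) {
    const orphans = [];
    const flush = candidate => {
        if (candidate
            && candidate.isDeleteMarker
            && (!beforeDate || candidate.lastModified < beforeDate)) {
            orphans.push(candidate.key);
        }
    };
    let current = null;     // youngest version of the key being scanned
    let currentKey = null;
    versions.forEach(v => {
        if (v.key !== currentKey) {
            flush(current); // previous key had a single version: candidate orphan
            currentKey = v.key;
            current = v;
        } else {
            current = null; // key has older versions: not an orphan
        }
    });
    flush(current);         // do not forget the last key, as result() does above
    return orphans;
}
```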
@ -6,4 +6,7 @@ module.exports = {
    DelimiterMaster: require('./delimiterMaster')
        .DelimiterMaster,
    MPU: require('./MPU').MultipartUploads,
    DelimiterCurrent: require('./delimiterCurrent').DelimiterCurrent,
    DelimiterNonCurrent: require('./delimiterNonCurrent').DelimiterNonCurrent,
    DelimiterOrphan: require('./delimiterOrphan').DelimiterOrphan,
};
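These exported names double as the listingType values accepted by the metadata layer (see listLifecycleObject below and the tests), so selecting an algorithm is a plain property lookup; a hedged sketch, where listAlgos stands for this module's exports:

```js
// Sketch: picking a listing algorithm from the exports above by the
// listingType string carried in the lifecycle listing params.
function makeListingExtension(listAlgos, params, log, vFormat) {
    const Extension = listAlgos[params.listingType]; // e.g. 'DelimiterCurrent'
    if (!Extension) {
        throw new Error(`unknown listing type: ${params.listingType}`);
    }
    return new Extension(params, log, vFormat);
}
```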
@ -0,0 +1,8 @@
export type ResultObject = {
    Contents: {
        key: string;
        value: string;
    }[];
    IsTruncated: boolean;
    NextKeyMarker ?: string;
};
@ -51,6 +51,29 @@ function _parseListEntries(entries) {
    });
}

function _parseLifecycleListEntries(entries) {
    return entries.map(entry => {
        const tmp = JSON.parse(entry.value);
        return {
            key: entry.key,
            value: {
                Size: tmp['content-length'],
                ETag: tmp['content-md5'],
                VersionId: tmp.versionId,
                LastModified: tmp['last-modified'],
                Owner: {
                    DisplayName: tmp['owner-display-name'],
                    ID: tmp['owner-id'],
                },
                StorageClass: tmp['x-amz-storage-class'],
                tags: tmp.tags,
                staleDate: tmp.staleDate,
                dataStoreName: tmp.dataStoreName,
            },
        };
    });
}

/** parseListEntries - parse the values returned in a listing by metadata
 * @param {object[]} entries - Version or Content entries in a metadata listing
 * @param {string} entries[].key - metadata key
@ -279,6 +302,29 @@ class MetadataWrapper {
        });
    }

    listLifecycleObject(bucketName, listingParams, log, cb) {
        this.client.listLifecycleObject(bucketName, listingParams, log, (err, data) => {
            log.debug('getting object listing for lifecycle from metadata');
            if (err) {
                log.error('error from metadata', { implName: this.implName,
                    err });
                return cb(err);
            }
            log.debug('object listing retrieved from metadata');
            // eslint-disable-next-line
            data.Contents = parseListEntries(data.Contents, _parseLifecycleListEntries);
            if (data.Contents instanceof Error) {
                log.error('error parsing metadata listing', {
                    error: data.Contents,
                    listingType: listingParams.listingType,
                    method: 'listLifecycleObject',
                });
                return cb(errors.InternalError);
            }
            return cb(null, data);
        });
    }

    listMultipartUploads(bucketName, listingParams, log, cb) {
        this.client.listMultipartUploads(bucketName, listingParams, log,
            (err, data) => {
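A sketch of how a lifecycle caller might drive the new wrapper method, mirroring the unit tests further down; metadata is assumed to be a MetadataWrapper instance backed by the MongoDB implementation, logger a werelogs logger, and handleError a hypothetical error handler.

```js
const params = {
    listingType: 'DelimiterNonCurrent',
    beforeDate: '2023-01-01T00:00:00.000Z',
    prefix: 'pfx1',
    maxKeys: 1000,
};
metadata.listLifecycleObject('my-bucket', params, logger, (err, data) => {
    if (err) {
        return handleError(err); // hypothetical error handler
    }
    // Each entry carries the fields parsed by _parseLifecycleListEntries,
    // including LastModified and, for non-current listings, staleDate.
    data.Contents.forEach(entry => console.log(entry.key, entry.value.staleDate));
    if (data.IsTruncated) {
        // resume with data.NextKeyMarker / data.NextVersionIdMarker
    }
    return undefined;
});
```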
@ -1,4 +1,5 @@
const assert = require('assert');
const errors = require('../../../errors').default;

const BucketInfo = require('../../../models/BucketInfo').default;

@ -121,6 +122,10 @@ class BucketClientInterface {
        return null;
    }

    listLifecycleObject(bucketName, params, log, cb) {
        return process.nextTick(cb, errors.NotImplemented);
    }

    _analyzeHealthFailure(log, callback) {
        let doFail = false;
        const reason = {
@ -329,6 +329,10 @@ class BucketFileInterface {
        return this.internalListObject(bucketName, params, log, cb);
    }

    listLifecycleObject(bucketName, params, log, cb) {
        return process.nextTick(cb, errors.NotImplemented);
    }

    getUUID(log, cb) {
        return this.mdDB.getUUID(cb);
    }
@ -318,6 +318,10 @@ const metastore = {
        });
    },

    listLifecycleObject(bucketName, params, log, cb) {
        return process.nextTick(cb, errors.NotImplemented);
    },

    listMultipartUploads(bucketName, listingParams, log, cb) {
        process.nextTick(() => {
            metastore.getBucketAttributes(bucketName, log, (err, bucket) => {
@ -1650,6 +1650,31 @@ class MongoClientInterface {
        });
    }

    listLifecycleObject(bucketName, params, log, cb) {
        return this.getBucketVFormat(bucketName, log, (err, vFormat) => {
            if (err) {
                return cb(err);
            }

            if (vFormat !== BUCKET_VERSIONS.v1) {
                log.error('not supported bucket format version',
                    { method: 'listLifecycleObject', bucket: bucketName, vFormat });
                return cb(errors.InternalError.customizeDescription('Not supported bucket format version'));
            }

            const extName = params.listingType;

            const extension = new listAlgos[extName](params, log, vFormat);
            const mainStreamParams = extension.genMDParams();

            const internalParams = {
                mainStreamParams,
            };

            return this.internalListObject(bucketName, internalParams, extension, vFormat, log, cb);
        });
    }

    /**
     * lists versioned and non-versioned objects in a bucket
     * @param {String} bucketName bucket name
@ -55,6 +55,14 @@ class MongoReadStream extends Readable {
            }
        }

        if (options.lastModified) {
            query['value.last-modified'] = {};

            if (options.lastModified.lt) {
                query['value.last-modified'].$lt = options.lastModified.lt;
            }
        }

        if (!Object.keys(query._id).length) {
            delete query._id;
        }
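The lastModified option handled above is what DelimiterCurrent.genMDParamsV1() emits for beforeDate; the resulting filter fragment looks like this (an illustration that simply mirrors the hunk; the usual _id key-range constraints still apply alongside it).

```js
// Shape of the Mongo query fragment added when a DelimiterCurrent listing
// passes beforeDate down as options.lastModified.
const options = { lastModified: { lt: '2023-01-01T00:00:00.000Z' } }; // example
const query = {};
if (options.lastModified) {
    query['value.last-modified'] = {};
    if (options.lastModified.lt) {
        query['value.last-modified'].$lt = options.lastModified.lt;
    }
}
// query is now { 'value.last-modified': { $lt: '2023-01-01T00:00:00.000Z' } }
```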
@ -0,0 +1,414 @@
|
|||
const async = require('async');
|
||||
const assert = require('assert');
|
||||
const werelogs = require('werelogs');
|
||||
const { MongoMemoryReplSet } = require('mongodb-memory-server');
|
||||
const logger = new werelogs.Logger('MongoClientInterface', 'debug', 'debug');
|
||||
const MetadataWrapper =
|
||||
require('../../../../../lib/storage/metadata/MetadataWrapper');
|
||||
const { versioning } = require('../../../../../index');
|
||||
const { BucketVersioningKeyFormat } = versioning.VersioningConstants;
|
||||
const { flagObjectForDeletion, makeBucketMD, putBulkObjectVersions } = require('./utils');
|
||||
|
||||
const IMPL_NAME = 'mongodb';
|
||||
const DB_NAME = 'metadata';
|
||||
const BUCKET_NAME = 'test-lifecycle-list-current-bucket';
|
||||
|
||||
const mongoserver = new MongoMemoryReplSet({
|
||||
debug: false,
|
||||
instanceOpts: [
|
||||
{ port: 27020 },
|
||||
],
|
||||
replSet: {
|
||||
name: 'rs0',
|
||||
count: 1,
|
||||
DB_NAME,
|
||||
storageEngine: 'ephemeralForTest',
|
||||
},
|
||||
});
|
||||
|
||||
describe('MongoClientInterface::metadata.listLifecycleObject::current', () => {
|
||||
let metadata;
|
||||
let collection;
|
||||
|
||||
beforeAll(done => {
|
||||
mongoserver.waitUntilRunning().then(() => {
|
||||
const opts = {
|
||||
mongodb: {
|
||||
replicaSetHosts: 'localhost:27020',
|
||||
writeConcern: 'majority',
|
||||
replicaSet: 'rs0',
|
||||
readPreference: 'primary',
|
||||
database: DB_NAME,
|
||||
},
|
||||
};
|
||||
metadata = new MetadataWrapper(IMPL_NAME, opts, null, logger);
|
||||
metadata.client.defaultBucketKeyFormat = BucketVersioningKeyFormat.v1;
|
||||
metadata.setup(done);
|
||||
});
|
||||
});
|
||||
|
||||
afterAll(done => {
|
||||
async.series([
|
||||
next => metadata.close(next),
|
||||
next => mongoserver.stop()
|
||||
.then(() => next())
|
||||
.catch(next),
|
||||
], done);
|
||||
});
|
||||
|
||||
beforeEach(done => {
|
||||
const bucketMD = makeBucketMD(BUCKET_NAME);
|
||||
const versionParams = {
|
||||
versioning: true,
|
||||
versionId: null,
|
||||
repairMaster: null,
|
||||
};
|
||||
async.series([
|
||||
next => metadata.createBucket(BUCKET_NAME, bucketMD, logger, err => {
|
||||
if (err) {
|
||||
return next(err);
|
||||
}
|
||||
|
||||
collection = metadata.client.getCollection(BUCKET_NAME);
|
||||
return next();
|
||||
}),
|
||||
next => {
|
||||
const params = {
|
||||
objName: 'pfx1-test-object',
|
||||
objVal: {
|
||||
key: 'pfx1-test-object',
|
||||
versionId: 'null',
|
||||
},
|
||||
nbVersions: 5,
|
||||
};
|
||||
const timestamp = 0;
|
||||
putBulkObjectVersions(metadata, BUCKET_NAME, params.objName, params.objVal, versionParams,
|
||||
params.nbVersions, timestamp, logger, next);
|
||||
/* eslint-disable max-len */
|
||||
// The following versions are created:
|
||||
// { "_id" : "Mpfx1-test-object", "value" : { "key" : "pfx1-test-object", "versionId" : "vid4", "last-modified" : "1970-01-01T00:00:00.005Z" } }
|
||||
// { "_id" : "Vpfx1-test-object{sep}id4", "value" : { "key" : "pfx1-test-object", "versionId" : "vid4", "last-modified" : "1970-01-01T00:00:00.005Z" } }
|
||||
// { "_id" : "Vpfx1-test-object{sep}id3", "value" : { "key" : "pfx1-test-object", "versionId" : "vid3", "last-modified" : "1970-01-01T00:00:00.004Z" } }
|
||||
// { "_id" : "Vpfx1-test-object{sep}id2", "value" : { "key" : "pfx1-test-object", "versionId" : "vid2", "last-modified" : "1970-01-01T00:00:00.003Z" } }
|
||||
// { "_id" : "Vpfx1-test-object{sep}id1", "value" : { "key" : "pfx1-test-object", "versionId" : "vid1", "last-modified" : "1970-01-01T00:00:00.002Z" } }
|
||||
// { "_id" : "Vpfx1-test-object{sep}id0", "value" : { "key" : "pfx1-test-object", "versionId" : "vid0", "last-modified" : "1970-01-01T00:00:00.001Z" } }
|
||||
/* eslint-enable max-len */
|
||||
},
|
||||
next => {
|
||||
const params = {
|
||||
objName: 'pfx2-test-object',
|
||||
objVal: {
|
||||
key: 'pfx2-test-object',
|
||||
versionId: 'null',
|
||||
},
|
||||
nbVersions: 5,
|
||||
};
|
||||
const timestamp = 2000;
|
||||
putBulkObjectVersions(metadata, BUCKET_NAME, params.objName, params.objVal, versionParams,
|
||||
params.nbVersions, timestamp, logger, next);
|
||||
/* eslint-disable max-len */
|
||||
// The following versions are created:
|
||||
// { "_id" : "Mpfx2-test-object", "value" : { "key" : "pfx2-test-object", "versionId" : "vid4", "last-modified" : "1970-01-01T00:00:02.005Z" } }
|
||||
// { "_id" : "Vpfx2-test-object{sep}id4", "value" : { "key" : "pfx2-test-object", "versionId" : "vid4", "last-modified" : "1970-01-01T00:00:02.005Z" } }
|
||||
// { "_id" : "Vpfx2-test-object{sep}id3", "value" : { "key" : "pfx2-test-object", "versionId" : "vid3", "last-modified" : "1970-01-01T00:00:02.004Z" } }
|
||||
// { "_id" : "Vpfx2-test-object{sep}id2", "value" : { "key" : "pfx2-test-object", "versionId" : "vid2", "last-modified" : "1970-01-01T00:00:02.003Z" } }
|
||||
// { "_id" : "Vpfx2-test-object{sep}id1", "value" : { "key" : "pfx2-test-object", "versionId" : "vid1", "last-modified" : "1970-01-01T00:00:02.002Z" } }
|
||||
// { "_id" : "Vpfx1-test-object{sep}id0", "value" : { "key" : "pfx2-test-object", "versionId" : "vid0", "last-modified" : "1970-01-01T00:00:02.001Z" } }
|
||||
/* eslint-enable max-len */
|
||||
},
|
||||
next => {
|
||||
const params = {
|
||||
objName: 'pfx3-test-object',
|
||||
objVal: {
|
||||
key: 'pfx3-test-object',
|
||||
versionId: 'null',
|
||||
},
|
||||
nbVersions: 5,
|
||||
};
|
||||
const timestamp = 1000;
|
||||
putBulkObjectVersions(metadata, BUCKET_NAME, params.objName, params.objVal, versionParams,
|
||||
params.nbVersions, timestamp, logger, next);
|
||||
/* eslint-disable max-len */
|
||||
// The following versions are created:
|
||||
// { "_id" : "Mpfx3-test-object", "value" : { "key" : "pfx3-test-object", "versionId" : "vid4", "last-modified" : "1970-01-01T00:00:01.005Z" } }
|
||||
// { "_id" : "Vpfx3-test-object{sep}id4", "value" : { "key" : "pfx3-test-object", "versionId" : "vid4", "last-modified" : "1970-01-01T00:00:01.005Z" } }
|
||||
// { "_id" : "Vpfx3-test-object{sep}id3", "value" : { "key" : "pfx3-test-object", "versionId" : "vid3", "last-modified" : "1970-01-01T00:00:01.004Z" } }
|
||||
// { "_id" : "Vpfx3-test-object{sep}id2", "value" : { "key" : "pfx3-test-object", "versionId" : "vid2", "last-modified" : "1970-01-01T00:00:01.003Z" } }
|
||||
// { "_id" : "Vpfx3-test-object{sep}id1", "value" : { "key" : "pfx3-test-object", "versionId" : "vid1", "last-modified" : "1970-01-01T00:00:01.002Z" } }
|
||||
// { "_id" : "Vpfx3-test-object{sep}id0", "value" : { "key" : "pfx3-test-object", "versionId" : "vid0", "last-modified" : "1970-01-01T00:00:01.001Z" } }
|
||||
/* eslint-enable max-len */
|
||||
},
|
||||
], done);
|
||||
});
|
||||
|
||||
afterEach(done => {
|
||||
metadata.deleteBucket(BUCKET_NAME, logger, done);
|
||||
});
|
||||
|
||||
it('Should list current versions of objects', done => {
|
||||
const params = {
|
||||
listingType: 'DelimiterCurrent',
|
||||
};
|
||||
return metadata.listLifecycleObject(BUCKET_NAME, params, logger, (err, data) => {
|
||||
assert.deepStrictEqual(err, null);
|
||||
assert.strictEqual(data.IsTruncated, false);
|
||||
assert.strictEqual(data.Contents.length, 3);
|
||||
assert.strictEqual(data.Contents[0].key, 'pfx1-test-object');
|
||||
assert.strictEqual(data.Contents[1].key, 'pfx2-test-object');
|
||||
assert.strictEqual(data.Contents[2].key, 'pfx3-test-object');
|
||||
return done();
|
||||
});
|
||||
});
|
||||
|
||||
it('Should return empty list when beforeDate is before the objects creation date', done => {
|
||||
const params = {
|
||||
listingType: 'DelimiterCurrent',
|
||||
beforeDate: '1970-01-01T00:00:00.000Z',
|
||||
};
|
||||
return metadata.listLifecycleObject(BUCKET_NAME, params, logger, (err, data) => {
|
||||
assert.deepStrictEqual(err, null);
|
||||
assert.strictEqual(data.IsTruncated, false);
|
||||
assert.strictEqual(data.Contents.length, 0);
|
||||
return done();
|
||||
});
|
||||
});
|
||||
|
||||
it('Should return the current version modified before 1970-01-01T00:00:00.010Z', done => {
|
||||
const params = {
|
||||
listingType: 'DelimiterCurrent',
|
||||
beforeDate: '1970-01-01T00:00:00.010Z',
|
||||
};
|
||||
return metadata.listLifecycleObject(BUCKET_NAME, params, logger, (err, data) => {
|
||||
assert.deepStrictEqual(err, null);
|
||||
assert.strictEqual(data.IsTruncated, false);
|
||||
assert.strictEqual(data.Contents.length, 1);
|
||||
assert.strictEqual(data.Contents[0].key, 'pfx1-test-object');
|
||||
return done();
|
||||
});
|
||||
});
|
||||
|
||||
it('Should return the current versions modified before 1970-01-01T00:00:01.010Z', done => {
|
||||
const params = {
|
||||
listingType: 'DelimiterCurrent',
|
||||
beforeDate: '1970-01-01T00:00:01.010Z',
|
||||
};
|
||||
return metadata.listLifecycleObject(BUCKET_NAME, params, logger, (err, data) => {
|
||||
assert.deepStrictEqual(err, null);
|
||||
assert.strictEqual(data.IsTruncated, false);
|
||||
assert.strictEqual(data.Contents.length, 2);
|
||||
assert.strictEqual(data.Contents[0].key, 'pfx1-test-object');
|
||||
assert.strictEqual(data.Contents[1].key, 'pfx3-test-object');
|
||||
return done();
|
||||
});
|
||||
});
|
||||
|
||||
it('Should return the current versions modified before 1970-01-01T00:00:02.010Z', done => {
|
||||
const params = {
|
||||
listingType: 'DelimiterCurrent',
|
||||
beforeDate: '1970-01-01T00:00:02.010Z',
|
||||
};
|
||||
return metadata.listLifecycleObject(BUCKET_NAME, params, logger, (err, data) => {
|
||||
assert.deepStrictEqual(err, null);
|
||||
assert.strictEqual(data.IsTruncated, false);
|
||||
assert.strictEqual(data.Contents.length, 3);
|
||||
assert.strictEqual(data.Contents[0].key, 'pfx1-test-object');
|
||||
assert.strictEqual(data.Contents[1].key, 'pfx2-test-object');
|
||||
assert.strictEqual(data.Contents[2].key, 'pfx3-test-object');
|
||||
return done();
|
||||
});
|
||||
});
|
||||
|
||||
it('Should truncate the list of current versions modified before 1970-01-01T00:00:01.010Z', done => {
|
||||
const params = {
|
||||
listingType: 'DelimiterCurrent',
|
||||
beforeDate: '1970-01-01T00:00:01.010Z',
|
||||
maxKeys: 1,
|
||||
};
|
||||
return metadata.listLifecycleObject(BUCKET_NAME, params, logger, (err, data) => {
|
||||
assert.deepStrictEqual(err, null);
|
||||
assert.strictEqual(data.IsTruncated, true);
|
||||
assert.strictEqual(data.Contents.length, 1);
|
||||
assert.strictEqual(data.Contents[0].key, 'pfx1-test-object');
|
||||
assert.strictEqual(data.NextKeyMarker, 'pfx1-test-object');
|
||||
|
||||
params.marker = 'pfx1-test-object';
|
||||
|
||||
return metadata.listLifecycleObject(BUCKET_NAME, params, logger, (err, data) => {
|
||||
assert.deepStrictEqual(err, null);
|
||||
assert.strictEqual(data.IsTruncated, false);
|
||||
assert.strictEqual(data.Contents[0].key, 'pfx3-test-object');
|
||||
assert.strictEqual(data.Contents.length, 1);
|
||||
|
||||
return done();
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
it('Should truncate list of current versions of objects', done => {
|
||||
const params = {
|
||||
listingType: 'DelimiterCurrent',
|
||||
maxKeys: 2,
|
||||
};
|
||||
return metadata.listLifecycleObject(BUCKET_NAME, params, logger, (err, data) => {
|
||||
assert.deepStrictEqual(err, null);
|
||||
assert.strictEqual(data.IsTruncated, true);
|
||||
assert.strictEqual(data.NextKeyMarker, 'pfx2-test-object');
|
||||
assert.strictEqual(data.Contents.length, 2);
|
||||
assert.strictEqual(data.Contents[0].key, 'pfx1-test-object');
|
||||
assert.strictEqual(data.Contents[1].key, 'pfx2-test-object');
|
||||
return done();
|
||||
});
|
||||
});
|
||||
|
||||
it('Should list the following current versions of objects', done => {
|
||||
const params = {
|
||||
listingType: 'DelimiterCurrent',
|
||||
marker: 'pfx2-test-object',
|
||||
};
|
||||
return metadata.listLifecycleObject(BUCKET_NAME, params, logger, (err, data) => {
|
||||
assert.deepStrictEqual(err, null);
|
||||
assert.strictEqual(data.IsTruncated, false);
|
||||
assert.strictEqual(data.Contents.length, 1);
|
||||
assert.strictEqual(data.Contents[0].key, 'pfx3-test-object');
|
||||
return done();
|
||||
});
|
||||
});
|
||||
|
||||
it('Should list current versions that start with prefix', done => {
|
||||
const params = {
|
||||
listingType: 'DelimiterCurrent',
|
||||
prefix: 'pfx2',
|
||||
};
|
||||
return metadata.listLifecycleObject(BUCKET_NAME, params, logger, (err, data) => {
|
||||
assert.deepStrictEqual(err, null);
|
||||
assert.strictEqual(data.IsTruncated, false);
|
||||
assert.strictEqual(data.Contents.length, 1);
|
||||
assert.strictEqual(data.Contents[0].key, 'pfx2-test-object');
|
||||
|
||||
return done();
|
||||
});
|
||||
});
|
||||
|
||||
it('Should return the list of current versions modified before 1970-01-01T00:00:01.010Z with prefix pfx1', done => {
|
||||
const params = {
|
||||
listingType: 'DelimiterCurrent',
|
||||
beforeDate: '1970-01-01T00:00:01.010Z',
|
||||
maxKeys: 1,
|
||||
prefix: 'pfx1',
|
||||
};
|
||||
return metadata.listLifecycleObject(BUCKET_NAME, params, logger, (err, data) => {
|
||||
assert.deepStrictEqual(err, null);
|
||||
assert.strictEqual(data.IsTruncated, false);
|
||||
assert.strictEqual(data.Contents.length, 1);
|
||||
assert.strictEqual(data.Contents[0].key, 'pfx1-test-object');
|
||||
|
||||
return done();
|
||||
});
|
||||
});
|
||||
|
||||
it('Should return an Internal error when the bucket does not exist (current)', done => {
|
||||
const bucketName = 'non-existent-bucket';
|
||||
const params = {
|
||||
listingType: 'DelimiterCurrent',
|
||||
};
|
||||
return metadata.listLifecycleObject(bucketName, params, logger, (err, data) => {
|
||||
expect(err.InternalError).toBe(true);
|
||||
assert(!data);
|
||||
return done();
|
||||
});
|
||||
});
|
||||
|
||||
it('Should not list deleted object', done => {
|
||||
const objVal = {
|
||||
'key': 'pfx4-test-object',
|
||||
'last-modified': new Date(0).toISOString(),
|
||||
};
|
||||
const versionParams = {
|
||||
versioning: true,
|
||||
};
|
||||
const params = {
|
||||
listingType: 'DelimiterCurrent',
|
||||
};
|
||||
async.series([
|
||||
next => metadata.putObjectMD(BUCKET_NAME, objVal.key, objVal, versionParams,
|
||||
logger, next),
|
||||
next => metadata.deleteObjectMD(BUCKET_NAME, objVal.key, null, logger, next),
|
||||
next => metadata.listObject(BUCKET_NAME, params, logger, (err, data) => {
|
||||
assert.ifError(err);
|
||||
assert.strictEqual(data.Contents.length, 3);
|
||||
assert.strictEqual(data.Contents[0].key, 'pfx1-test-object');
|
||||
assert.strictEqual(data.Contents[1].key, 'pfx2-test-object');
|
||||
assert.strictEqual(data.Contents[2].key, 'pfx3-test-object');
|
||||
return next();
|
||||
}),
|
||||
], done);
|
||||
});
|
||||
|
||||
it('Should not list phd master key when listing current versions', done => {
|
||||
const objVal = {
|
||||
'key': 'pfx4-test-object',
|
||||
'versionId': 'null',
|
||||
'last-modified': new Date(0).toISOString(),
|
||||
};
|
||||
const versionParams = {
|
||||
versioning: true,
|
||||
};
|
||||
const params = {
|
||||
listingType: 'DelimiterCurrent',
|
||||
prefix: 'pfx4',
|
||||
};
|
||||
let versionId;
|
||||
let lastVersionId;
|
||||
async.series([
|
||||
next => metadata.putObjectMD(BUCKET_NAME, 'pfx4-test-object', objVal, versionParams,
|
||||
logger, (err, res) => {
|
||||
if (err) {
|
||||
return next(err);
|
||||
}
|
||||
versionId = JSON.parse(res).versionId;
|
||||
return next(null);
|
||||
}),
|
||||
next => metadata.putObjectMD(BUCKET_NAME, 'pfx4-test-object', objVal, versionParams,
|
||||
logger, (err, res) => {
|
||||
if (err) {
|
||||
return next(err);
|
||||
}
|
||||
lastVersionId = JSON.parse(res).versionId;
|
||||
return next(null);
|
||||
}),
|
||||
next => metadata.deleteObjectMD(BUCKET_NAME, 'pfx4-test-object', { versionId: lastVersionId },
|
||||
logger, next),
|
||||
next => metadata.listLifecycleObject(BUCKET_NAME, params, logger, (err, data) => {
|
||||
assert.ifError(err);
|
||||
assert.strictEqual(data.Contents[0].value.VersionId, versionId);
|
||||
return next();
|
||||
}),
|
||||
], done);
|
||||
});
|
||||
|
||||
it('Should not list the current version tagged for deletion', done => {
|
||||
const objVal = {
|
||||
'key': 'pfx4-test-object',
|
||||
'last-modified': new Date(0).toISOString(),
|
||||
};
|
||||
const versionParams = {
|
||||
versioning: true,
|
||||
};
|
||||
const params = {
|
||||
listingType: 'DelimiterCurrent',
|
||||
};
|
||||
async.series([
|
||||
next => metadata.putObjectMD(BUCKET_NAME, objVal.key, objVal, versionParams,
|
||||
logger, next),
|
||||
next => flagObjectForDeletion(collection, objVal.key, next),
|
||||
next => metadata.listObject(BUCKET_NAME, params, logger, (err, data) => {
|
||||
assert.ifError(err);
|
||||
assert.strictEqual(data.Contents.length, 3);
|
||||
assert.strictEqual(data.Contents[0].key, 'pfx1-test-object');
|
||||
assert.strictEqual(data.Contents[1].key, 'pfx2-test-object');
|
||||
assert.strictEqual(data.Contents[2].key, 'pfx3-test-object');
|
||||
return next();
|
||||
}),
|
||||
], done);
|
||||
});
|
||||
});
|
|
@ -0,0 +1,102 @@
|
|||
const async = require('async');
|
||||
const assert = require('assert');
|
||||
const werelogs = require('werelogs');
|
||||
const { MongoMemoryReplSet } = require('mongodb-memory-server');
|
||||
const logger = new werelogs.Logger('MongoClientInterface', 'debug', 'debug');
|
||||
const MetadataWrapper =
|
||||
require('../../../../../lib/storage/metadata/MetadataWrapper');
|
||||
const { versioning } = require('../../../../../index');
|
||||
const { BucketVersioningKeyFormat } = versioning.VersioningConstants;
|
||||
const { makeBucketMD } = require('./utils');
|
||||
|
||||
const IMPL_NAME = 'mongodb';
|
||||
const DB_NAME = 'metadata';
|
||||
const BUCKET_NAME = 'test-lifecycle-list-bucket-v0';
|
||||
|
||||
const mongoserver = new MongoMemoryReplSet({
|
||||
debug: false,
|
||||
instanceOpts: [
|
||||
{ port: 27020 },
|
||||
],
|
||||
replSet: {
|
||||
name: 'rs0',
|
||||
count: 1,
|
||||
DB_NAME,
|
||||
storageEngine: 'ephemeralForTest',
|
||||
},
|
||||
});
|
||||
|
||||
describe('MongoClientInterface::metadata.listLifecycleObject::global', () => {
|
||||
let metadata;
|
||||
|
||||
beforeAll(done => {
|
||||
mongoserver.waitUntilRunning().then(() => {
|
||||
const opts = {
|
||||
mongodb: {
|
||||
replicaSetHosts: 'localhost:27020',
|
||||
writeConcern: 'majority',
|
||||
replicaSet: 'rs0',
|
||||
readPreference: 'primary',
|
||||
database: DB_NAME,
|
||||
},
|
||||
};
|
||||
metadata = new MetadataWrapper(IMPL_NAME, opts, null, logger);
|
||||
metadata.client.defaultBucketKeyFormat = BucketVersioningKeyFormat.v0;
|
||||
metadata.setup(done);
|
||||
});
|
||||
});
|
||||
|
||||
afterAll(done => {
|
||||
async.series([
|
||||
next => metadata.close(next),
|
||||
next => mongoserver.stop()
|
||||
.then(() => next())
|
||||
.catch(next),
|
||||
], done);
|
||||
});
|
||||
|
||||
beforeEach(done => {
|
||||
const bucketMD = makeBucketMD(BUCKET_NAME);
|
||||
return metadata.createBucket(BUCKET_NAME, bucketMD, logger, done);
|
||||
});
|
||||
|
||||
afterEach(done => {
|
||||
metadata.deleteBucket(BUCKET_NAME, logger, done);
|
||||
});
|
||||
|
||||
it('Should return error listing current versions if v0 key format', done => {
|
||||
const params = {
|
||||
listingType: 'DelimiterCurrent',
|
||||
};
|
||||
return metadata.listLifecycleObject(BUCKET_NAME, params, logger, (err, data) => {
|
||||
expect(err.InternalError).toBe(true);
|
||||
assert(!data);
|
||||
|
||||
return done();
|
||||
});
|
||||
});
|
||||
|
||||
it('Should return error listing non-current versions if v0 key format', done => {
|
||||
const params = {
|
||||
listingType: 'DelimiterNonCurrent',
|
||||
};
|
||||
return metadata.listLifecycleObject(BUCKET_NAME, params, logger, (err, data) => {
|
||||
expect(err.InternalError).toBe(true);
|
||||
assert(!data);
|
||||
|
||||
return done();
|
||||
});
|
||||
});
|
||||
|
||||
it('Should return error listing orphan delete markers if v0 key format', done => {
|
||||
const params = {
|
||||
listingType: 'DelimiterOrphan',
|
||||
};
|
||||
return metadata.listLifecycleObject(BUCKET_NAME, params, logger, (err, data) => {
|
||||
expect(err.InternalError).toBe(true);
|
||||
assert(!data);
|
||||
|
||||
return done();
|
||||
});
|
||||
});
|
||||
});
|
|
@ -0,0 +1,778 @@
|
|||
const async = require('async');
|
||||
const assert = require('assert');
|
||||
const werelogs = require('werelogs');
|
||||
const { MongoMemoryReplSet } = require('mongodb-memory-server');
|
||||
const logger = new werelogs.Logger('MongoClientInterface', 'debug', 'debug');
|
||||
const MetadataWrapper =
|
||||
require('../../../../../lib/storage/metadata/MetadataWrapper');
|
||||
const { versioning } = require('../../../../../index');
|
||||
const { BucketVersioningKeyFormat } = versioning.VersioningConstants;
|
||||
const { assertContents, makeBucketMD, putBulkObjectVersions, flagObjectForDeletion } = require('./utils');
|
||||
|
||||
const IMPL_NAME = 'mongodb';
|
||||
const DB_NAME = 'metadata';
|
||||
const BUCKET_NAME = 'test-lifecycle-list-non-current-bucket';
|
||||
|
||||
const mongoserver = new MongoMemoryReplSet({
|
||||
debug: false,
|
||||
instanceOpts: [
|
||||
{ port: 27020 },
|
||||
],
|
||||
replSet: {
|
||||
name: 'rs0',
|
||||
count: 1,
|
||||
DB_NAME,
|
||||
storageEngine: 'ephemeralForTest',
|
||||
},
|
||||
});
|
||||
|
||||
describe('MongoClientInterface::metadata.listLifecycleObject::noncurrent', () => {
|
||||
let metadata;
|
||||
let collection;
|
||||
|
||||
beforeAll(done => {
|
||||
mongoserver.waitUntilRunning().then(() => {
|
||||
const opts = {
|
||||
mongodb: {
|
||||
replicaSetHosts: 'localhost:27020',
|
||||
writeConcern: 'majority',
|
||||
replicaSet: 'rs0',
|
||||
readPreference: 'primary',
|
||||
database: DB_NAME,
|
||||
},
|
||||
};
|
||||
metadata = new MetadataWrapper(IMPL_NAME, opts, null, logger);
|
||||
metadata.client.defaultBucketKeyFormat = BucketVersioningKeyFormat.v1;
|
||||
metadata.setup(done);
|
||||
});
|
||||
});
|
||||
|
||||
afterAll(done => {
|
||||
async.series([
|
||||
next => metadata.close(next),
|
||||
next => mongoserver.stop()
|
||||
.then(() => next())
|
||||
.catch(next),
|
||||
], done);
|
||||
});
|
||||
|
||||
beforeEach(done => {
|
||||
const bucketMD = makeBucketMD(BUCKET_NAME);
|
||||
const versionParams = {
|
||||
versioning: true,
|
||||
versionId: null,
|
||||
repairMaster: null,
|
||||
};
|
||||
async.series([
|
||||
next => metadata.createBucket(BUCKET_NAME, bucketMD, logger, err => {
|
||||
if (err) {
|
||||
return next(err);
|
||||
}
|
||||
|
||||
collection = metadata.client.getCollection(BUCKET_NAME);
|
||||
return next();
|
||||
}),
|
||||
next => {
|
||||
const params = {
|
||||
objName: 'pfx1-test-object',
|
||||
objVal: {
|
||||
key: 'pfx1-test-object',
|
||||
versionId: 'null',
|
||||
},
|
||||
nbVersions: 5,
|
||||
};
|
||||
const timestamp = 0;
|
||||
putBulkObjectVersions(metadata, BUCKET_NAME, params.objName, params.objVal, versionParams,
|
||||
params.nbVersions, timestamp, logger, next);
|
||||
/* eslint-disable max-len */
|
||||
// { "_id" : "Mpfx1-test-object", "value" : { "key" : "pfx1-test-object", "versionId" : "vid4", "last-modified" : "1970-01-01T00:00:00.005Z" } }
|
||||
// { "_id" : "Vpfx1-test-object{sep}id4", "value" : { "key" : "pfx1-test-object", "versionId" : "vid4", "last-modified" : "1970-01-01T00:00:00.005Z" } }
|
||||
// { "_id" : "Vpfx1-test-object{sep}id3", "value" : { "key" : "pfx1-test-object", "versionId" : "vid3", "last-modified" : "1970-01-01T00:00:00.004Z" } }
|
||||
// { "_id" : "Vpfx1-test-object{sep}id2", "value" : { "key" : "pfx1-test-object", "versionId" : "vid2", "last-modified" : "1970-01-01T00:00:00.003Z" } }
|
||||
// { "_id" : "Vpfx1-test-object{sep}id1", "value" : { "key" : "pfx1-test-object", "versionId" : "vid1", "last-modified" : "1970-01-01T00:00:00.002Z" } }
|
||||
// { "_id" : "Vpfx1-test-object{sep}id0", "value" : { "key" : "pfx1-test-object", "versionId" : "vid0", "last-modified" : "1970-01-01T00:00:00.001Z" } }
|
||||
/* eslint-enable max-len */
|
||||
},
|
||||
next => {
|
||||
const params = {
|
||||
objName: 'pfx2-test-object',
|
||||
objVal: {
|
||||
key: 'pfx2-test-object',
|
||||
versionId: 'null',
|
||||
},
|
||||
nbVersions: 5,
|
||||
};
|
||||
const timestamp = 0;
|
||||
putBulkObjectVersions(metadata, BUCKET_NAME, params.objName, params.objVal, versionParams,
|
||||
params.nbVersions, timestamp, logger, next);
|
||||
/* eslint-disable max-len */
|
||||
// { "_id" : "Mpfx2-test-object", "value" : { "key" : "pfx2-test-object", "versionId" : "vid4", "last-modified" : "1970-01-01T00:00:00.005Z" } }
|
||||
// { "_id" : "Vpfx2-test-object{sep}id4", "value" : { "key" : "pfx2-test-object", "versionId" : "vid4", "last-modified" : "1970-01-01T00:00:00.005Z" } }
|
||||
// { "_id" : "Vpfx2-test-object{sep}id3", "value" : { "key" : "pfx2-test-object", "versionId" : "vid3", "last-modified" : "1970-01-01T00:00:00.004Z" } }
|
||||
// { "_id" : "Vpfx2-test-object{sep}id2", "value" : { "key" : "pfx2-test-object", "versionId" : "vid2", "last-modified" : "1970-01-01T00:00:00.003Z" } }
|
||||
// { "_id" : "Vpfx2-test-object{sep}id1", "value" : { "key" : "pfx2-test-object", "versionId" : "vid1", "last-modified" : "1970-01-01T00:00:00.002Z" } }
|
||||
// { "_id" : "Vpfx1-test-object{sep}id0", "value" : { "key" : "pfx2-test-object", "versionId" : "vid0", "last-modified" : "1970-01-01T00:00:00.001Z" } }
|
||||
/* eslint-enable max-len */
|
||||
},
|
||||
next => {
|
||||
const params = {
|
||||
objName: 'pfx3-test-object',
|
||||
objVal: {
|
||||
key: 'pfx3-test-object',
|
||||
versionId: 'null',
|
||||
},
|
||||
nbVersions: 5,
|
||||
};
|
||||
const timestamp = 0;
|
||||
putBulkObjectVersions(metadata, BUCKET_NAME, params.objName, params.objVal, versionParams,
|
||||
params.nbVersions, timestamp, logger, next);
|
||||
/* eslint-disable max-len */
|
||||
// { "_id" : "Mpfx3-test-object", "value" : { "key" : "pfx3-test-object", "versionId" : "vid4", "last-modified" : "1970-01-01T00:00:00.005Z" } }
|
||||
// { "_id" : "Vpfx3-test-object{sep}id4", "value" : { "key" : "pfx3-test-object", "versionId" : "vid4", "last-modified" : "1970-01-01T00:00:00.005Z" } }
|
||||
// { "_id" : "Vpfx3-test-object{sep}id3", "value" : { "key" : "pfx3-test-object", "versionId" : "vid3", "last-modified" : "1970-01-01T00:00:00.004Z" } }
|
||||
// { "_id" : "Vpfx3-test-object{sep}id2", "value" : { "key" : "pfx3-test-object", "versionId" : "vid2", "last-modified" : "1970-01-01T00:00:00.003Z" } }
|
||||
// { "_id" : "Vpfx3-test-object{sep}id1", "value" : { "key" : "pfx3-test-object", "versionId" : "vid1", "last-modified" : "1970-01-01T00:00:00.002Z" } }
|
||||
// { "_id" : "Vpfx3-test-object{sep}id0", "value" : { "key" : "pfx3-test-object", "versionId" : "vid0", "last-modified" : "1970-01-01T00:00:00.001Z" } }
|
||||
/* eslint-enable max-len */
|
||||
},
|
||||
], done);
|
||||
});
|
||||
|
||||
afterEach(done => metadata.deleteBucket(BUCKET_NAME, logger, done));
|
||||
|
||||
it('Should list non-current versions', done => {
|
||||
const params = {
|
||||
listingType: 'DelimiterNonCurrent',
|
||||
};
|
||||
return metadata.listLifecycleObject(BUCKET_NAME, params, logger, (err, data) => {
|
||||
assert.deepStrictEqual(err, null);
|
||||
assert.strictEqual(data.IsTruncated, false);
|
||||
assert.strictEqual(data.Contents.length, 12);
|
||||
const expected = [
|
||||
{
|
||||
key: 'pfx1-test-object',
|
||||
LastModified: '1970-01-01T00:00:00.004Z',
|
||||
staleDate: '1970-01-01T00:00:00.005Z',
|
||||
},
|
||||
{
|
||||
key: 'pfx1-test-object',
|
||||
LastModified: '1970-01-01T00:00:00.003Z',
|
||||
staleDate: '1970-01-01T00:00:00.004Z',
|
||||
},
|
||||
{
|
||||
key: 'pfx1-test-object',
|
||||
LastModified: '1970-01-01T00:00:00.002Z',
|
||||
staleDate: '1970-01-01T00:00:00.003Z',
|
||||
},
|
||||
{
|
||||
key: 'pfx1-test-object',
|
||||
LastModified: '1970-01-01T00:00:00.001Z',
|
||||
staleDate: '1970-01-01T00:00:00.002Z',
|
||||
},
|
||||
{
|
||||
key: 'pfx2-test-object',
|
||||
LastModified: '1970-01-01T00:00:00.004Z',
|
||||
staleDate: '1970-01-01T00:00:00.005Z',
|
||||
},
|
||||
{
|
||||
key: 'pfx2-test-object',
|
||||
LastModified: '1970-01-01T00:00:00.003Z',
|
||||
staleDate: '1970-01-01T00:00:00.004Z',
|
||||
},
|
||||
{
|
||||
key: 'pfx2-test-object',
|
||||
LastModified: '1970-01-01T00:00:00.002Z',
|
||||
staleDate: '1970-01-01T00:00:00.003Z',
|
||||
},
|
||||
{
|
||||
key: 'pfx2-test-object',
|
||||
LastModified: '1970-01-01T00:00:00.001Z',
|
||||
staleDate: '1970-01-01T00:00:00.002Z',
|
||||
},
|
||||
{
|
||||
key: 'pfx3-test-object',
|
||||
LastModified: '1970-01-01T00:00:00.004Z',
|
||||
staleDate: '1970-01-01T00:00:00.005Z',
|
||||
},
|
||||
{
|
||||
key: 'pfx3-test-object',
|
||||
LastModified: '1970-01-01T00:00:00.003Z',
|
||||
staleDate: '1970-01-01T00:00:00.004Z',
|
||||
},
|
||||
{
|
||||
key: 'pfx3-test-object',
|
||||
LastModified: '1970-01-01T00:00:00.002Z',
|
||||
staleDate: '1970-01-01T00:00:00.003Z',
|
||||
},
|
||||
{
|
||||
key: 'pfx3-test-object',
|
||||
LastModified: '1970-01-01T00:00:00.001Z',
|
||||
staleDate: '1970-01-01T00:00:00.002Z',
|
||||
},
|
||||
];
|
||||
assertContents(data.Contents, expected);
|
||||
|
||||
return done();
|
||||
});
|
||||
});
|
||||
|
||||
it('Should return empty list when beforeDate is before the objects stale date', done => {
|
||||
const params = {
|
||||
listingType: 'DelimiterNonCurrent',
|
||||
beforeDate: '1970-01-01T00:00:00.000Z',
|
||||
};
|
||||
return metadata.listLifecycleObject(BUCKET_NAME, params, logger, (err, data) => {
|
||||
assert.deepStrictEqual(err, null);
|
||||
assert.strictEqual(data.IsTruncated, false);
|
||||
assert.strictEqual(data.Contents.length, 0);
|
||||
|
||||
return done();
|
||||
});
|
||||
});
|
||||
|
||||
it('Should return the non-current versions with stale date older than 1970-01-01T00:00:00.003Z', done => {
|
||||
const params = {
|
||||
listingType: 'DelimiterNonCurrent',
|
||||
beforeDate: '1970-01-01T00:00:00.003Z',
|
||||
};
|
||||
return metadata.listLifecycleObject(BUCKET_NAME, params, logger, (err, data) => {
|
||||
assert.deepStrictEqual(err, null);
|
||||
assert.strictEqual(data.IsTruncated, false);
|
||||
assert.strictEqual(data.Contents.length, 3);
|
||||
const expected = [
|
||||
{
|
||||
key: 'pfx1-test-object',
|
||||
LastModified: '1970-01-01T00:00:00.001Z',
|
||||
staleDate: '1970-01-01T00:00:00.002Z',
|
||||
},
|
||||
{
|
||||
key: 'pfx2-test-object',
|
||||
LastModified: '1970-01-01T00:00:00.001Z',
|
||||
staleDate: '1970-01-01T00:00:00.002Z',
|
||||
},
|
||||
{
|
||||
key: 'pfx3-test-object',
|
||||
LastModified: '1970-01-01T00:00:00.001Z',
|
||||
staleDate: '1970-01-01T00:00:00.002Z',
|
||||
},
|
||||
];
|
||||
assertContents(data.Contents, expected);
|
||||
|
||||
return done();
|
||||
});
|
||||
});
|
||||
|
||||
it('Should list non-current versions three by three', done => {
|
||||
const params = {
|
||||
listingType: 'DelimiterNonCurrent',
|
||||
maxKeys: 3,
|
||||
};
|
||||
|
||||
return async.series([
|
||||
next => metadata.listLifecycleObject(BUCKET_NAME, params, logger, (err, data) => {
|
||||
assert.deepStrictEqual(err, null);
|
||||
assert.strictEqual(data.IsTruncated, true);
|
||||
assert.strictEqual(data.NextKeyMarker, 'pfx1-test-object');
|
||||
assert.strictEqual(data.NextVersionIdMarker, data.Contents[2].value.VersionId);
|
||||
assert.strictEqual(data.Contents.length, 3);
|
||||
const expected = [
|
||||
{
|
||||
key: 'pfx1-test-object',
|
||||
LastModified: '1970-01-01T00:00:00.004Z',
|
||||
staleDate: '1970-01-01T00:00:00.005Z',
|
||||
},
|
||||
{
|
||||
key: 'pfx1-test-object',
|
||||
LastModified: '1970-01-01T00:00:00.003Z',
|
||||
staleDate: '1970-01-01T00:00:00.004Z',
|
||||
},
|
||||
{
|
||||
key: 'pfx1-test-object',
|
||||
LastModified: '1970-01-01T00:00:00.002Z',
|
||||
staleDate: '1970-01-01T00:00:00.003Z',
|
||||
},
|
||||
];
|
||||
assertContents(data.Contents, expected);
|
||||
|
||||
params.keyMarker = data.NextKeyMarker;
|
||||
params.versionIdMarker = data.NextVersionIdMarker;
|
||||
|
||||
return next();
|
||||
}),
|
||||
next => metadata.listLifecycleObject(BUCKET_NAME, params, logger, (err, data) => {
|
||||
assert.deepStrictEqual(err, null);
|
||||
assert.strictEqual(data.IsTruncated, true);
|
||||
assert.strictEqual(data.NextKeyMarker, 'pfx2-test-object');
|
||||
assert.strictEqual(data.NextVersionIdMarker, data.Contents[2].value.VersionId);
|
||||
assert.strictEqual(data.Contents.length, 3);
|
||||
const expected = [
|
||||
{
|
||||
key: 'pfx1-test-object',
|
||||
LastModified: '1970-01-01T00:00:00.001Z',
|
||||
staleDate: '1970-01-01T00:00:00.002Z',
|
||||
},
|
||||
{
|
||||
key: 'pfx2-test-object',
|
||||
LastModified: '1970-01-01T00:00:00.004Z',
|
||||
staleDate: '1970-01-01T00:00:00.005Z',
|
||||
},
|
||||
{
|
||||
key: 'pfx2-test-object',
|
||||
LastModified: '1970-01-01T00:00:00.003Z',
|
||||
staleDate: '1970-01-01T00:00:00.004Z',
|
||||
},
|
||||
];
|
||||
assertContents(data.Contents, expected);
|
||||
|
||||
params.keyMarker = data.NextKeyMarker;
|
||||
params.versionIdMarker = data.NextVersionIdMarker;
|
||||
|
||||
return next();
|
||||
}),
|
||||
next => metadata.listLifecycleObject(BUCKET_NAME, params, logger, (err, data) => {
|
||||
assert.deepStrictEqual(err, null);
|
||||
assert.strictEqual(data.IsTruncated, true);
|
||||
assert.strictEqual(data.NextKeyMarker, 'pfx3-test-object');
|
||||
assert.strictEqual(data.NextVersionIdMarker, data.Contents[2].value.VersionId);
|
||||
assert.strictEqual(data.Contents.length, 3);
|
||||
const expected = [
|
||||
{
|
||||
key: 'pfx2-test-object',
|
||||
LastModified: '1970-01-01T00:00:00.002Z',
|
||||
staleDate: '1970-01-01T00:00:00.003Z',
|
||||
},
|
||||
{
|
||||
key: 'pfx2-test-object',
|
||||
LastModified: '1970-01-01T00:00:00.001Z',
|
||||
staleDate: '1970-01-01T00:00:00.002Z',
|
||||
},
|
||||
{
|
||||
key: 'pfx3-test-object',
|
||||
LastModified: '1970-01-01T00:00:00.004Z',
|
||||
staleDate: '1970-01-01T00:00:00.005Z',
|
||||
},
|
||||
];
|
||||
assertContents(data.Contents, expected);
|
||||
|
||||
params.keyMarker = data.NextKeyMarker;
|
||||
params.versionIdMarker = data.NextVersionIdMarker;
|
||||
|
||||
return next();
|
||||
}),
|
||||
next => metadata.listLifecycleObject(BUCKET_NAME, params, logger, (err, data) => {
|
||||
assert.deepStrictEqual(err, null);
|
||||
assert.strictEqual(data.IsTruncated, false);
|
||||
assert.strictEqual(data.Contents.length, 3);
|
||||
const expected = [
|
||||
{
|
||||
key: 'pfx3-test-object',
|
||||
LastModified: '1970-01-01T00:00:00.003Z',
|
||||
staleDate: '1970-01-01T00:00:00.004Z',
|
||||
},
|
||||
{
|
||||
key: 'pfx3-test-object',
|
||||
LastModified: '1970-01-01T00:00:00.002Z',
|
||||
staleDate: '1970-01-01T00:00:00.003Z',
|
||||
},
|
||||
{
|
||||
key: 'pfx3-test-object',
|
||||
LastModified: '1970-01-01T00:00:00.001Z',
|
||||
staleDate: '1970-01-01T00:00:00.002Z',
|
||||
},
|
||||
];
|
||||
assertContents(data.Contents, expected);
|
||||
|
||||
return next();
|
||||
}),
|
||||
], done);
|
||||
});
|
||||
|
||||
it('Should list non-current versions four by four', done => {
|
||||
const params = {
|
||||
listingType: 'DelimiterNonCurrent',
|
||||
maxKeys: 4,
|
||||
};
|
||||
|
||||
return async.series([
|
||||
next => metadata.listLifecycleObject(BUCKET_NAME, params, logger, (err, data) => {
|
||||
assert.deepStrictEqual(err, null);
|
||||
|
||||
assert.strictEqual(data.IsTruncated, true);
|
||||
assert.strictEqual(data.NextKeyMarker, 'pfx1-test-object');
|
||||
assert.strictEqual(data.NextVersionIdMarker, data.Contents[3].value.VersionId);
|
||||
assert.strictEqual(data.Contents.length, 4);
|
||||
|
||||
const expected = [
|
||||
{
|
||||
key: 'pfx1-test-object',
|
||||
LastModified: '1970-01-01T00:00:00.004Z',
|
||||
staleDate: '1970-01-01T00:00:00.005Z',
|
||||
},
|
||||
{
|
||||
key: 'pfx1-test-object',
|
||||
LastModified: '1970-01-01T00:00:00.003Z',
|
||||
staleDate: '1970-01-01T00:00:00.004Z',
|
||||
},
|
||||
{
|
||||
key: 'pfx1-test-object',
|
||||
LastModified: '1970-01-01T00:00:00.002Z',
|
||||
staleDate: '1970-01-01T00:00:00.003Z',
|
||||
},
|
||||
{
|
||||
key: 'pfx1-test-object',
|
||||
LastModified: '1970-01-01T00:00:00.001Z',
|
||||
staleDate: '1970-01-01T00:00:00.002Z',
|
||||
},
|
||||
];
|
||||
|
||||
assertContents(data.Contents, expected);
|
||||
|
||||
params.keyMarker = data.NextKeyMarker;
|
||||
params.versionIdMarker = data.NextVersionIdMarker;
|
||||
|
||||
return next();
|
||||
}),
|
||||
next => metadata.listLifecycleObject(BUCKET_NAME, params, logger, (err, data) => {
|
||||
assert.deepStrictEqual(err, null);
|
||||
assert.strictEqual(data.IsTruncated, true);
|
||||
assert.strictEqual(data.NextKeyMarker, 'pfx2-test-object');
|
||||
assert.strictEqual(data.NextVersionIdMarker, data.Contents[3].value.VersionId);
|
||||
assert.strictEqual(data.Contents.length, 4);
|
||||
const expected = [
|
||||
{
|
||||
key: 'pfx2-test-object',
|
||||
LastModified: '1970-01-01T00:00:00.004Z',
|
||||
staleDate: '1970-01-01T00:00:00.005Z',
|
||||
},
|
||||
{
|
||||
key: 'pfx2-test-object',
|
||||
LastModified: '1970-01-01T00:00:00.003Z',
|
||||
staleDate: '1970-01-01T00:00:00.004Z',
|
||||
},
|
||||
{
|
||||
key: 'pfx2-test-object',
|
||||
LastModified: '1970-01-01T00:00:00.002Z',
|
||||
staleDate: '1970-01-01T00:00:00.003Z',
|
||||
},
|
||||
{
|
||||
key: 'pfx2-test-object',
|
||||
LastModified: '1970-01-01T00:00:00.001Z',
|
||||
staleDate: '1970-01-01T00:00:00.002Z',
|
||||
},
|
||||
];
|
||||
assertContents(data.Contents, expected);
|
||||
|
||||
params.keyMarker = data.NextKeyMarker;
|
||||
params.versionIdMarker = data.NextVersionIdMarker;
|
||||
|
||||
return next();
|
||||
}),
|
||||
next => metadata.listLifecycleObject(BUCKET_NAME, params, logger, (err, data) => {
|
||||
assert.deepStrictEqual(err, null);
|
||||
assert.strictEqual(data.IsTruncated, false);
|
||||
assert.strictEqual(data.Contents.length, 4);
|
||||
const expected = [
|
||||
{
|
||||
key: 'pfx3-test-object',
|
||||
LastModified: '1970-01-01T00:00:00.004Z',
|
||||
staleDate: '1970-01-01T00:00:00.005Z',
|
||||
},
|
||||
{
|
||||
key: 'pfx3-test-object',
|
||||
LastModified: '1970-01-01T00:00:00.003Z',
|
||||
staleDate: '1970-01-01T00:00:00.004Z',
|
||||
},
|
||||
{
|
||||
key: 'pfx3-test-object',
|
||||
LastModified: '1970-01-01T00:00:00.002Z',
|
||||
staleDate: '1970-01-01T00:00:00.003Z',
|
||||
},
|
||||
{
|
||||
key: 'pfx3-test-object',
|
||||
LastModified: '1970-01-01T00:00:00.001Z',
|
||||
staleDate: '1970-01-01T00:00:00.002Z',
|
||||
},
|
||||
];
|
||||
assertContents(data.Contents, expected);
|
||||
|
||||
return next();
|
||||
}),
|
||||
], done);
|
||||
});
|
||||
|
||||
it('Should list non-current versions with a specific prefix two by two', done => {
|
||||
const params = {
|
||||
listingType: 'DelimiterNonCurrent',
|
||||
maxKeys: 2,
|
||||
prefix: 'pfx2',
|
||||
};
|
||||
|
||||
return async.series([
|
||||
next => metadata.listLifecycleObject(BUCKET_NAME, params, logger, (err, data) => {
|
||||
assert.deepStrictEqual(err, null);
|
||||
|
||||
assert.strictEqual(data.IsTruncated, true);
|
||||
assert.strictEqual(data.NextKeyMarker, 'pfx2-test-object');
|
||||
assert.strictEqual(data.NextVersionIdMarker, data.Contents[1].value.VersionId);
|
||||
assert.strictEqual(data.Contents.length, 2);
|
||||
|
||||
const expected = [
|
||||
{
|
||||
key: 'pfx2-test-object',
|
||||
LastModified: '1970-01-01T00:00:00.004Z',
|
||||
staleDate: '1970-01-01T00:00:00.005Z',
|
||||
},
|
||||
{
|
||||
key: 'pfx2-test-object',
|
||||
LastModified: '1970-01-01T00:00:00.003Z',
|
||||
staleDate: '1970-01-01T00:00:00.004Z',
|
||||
},
|
||||
];
|
||||
|
||||
assertContents(data.Contents, expected);
|
||||
|
||||
params.keyMarker = data.NextKeyMarker;
|
||||
params.versionIdMarker = data.NextVersionIdMarker;
|
||||
|
||||
return next();
|
||||
}),
|
||||
next => metadata.listLifecycleObject(BUCKET_NAME, params, logger, (err, data) => {
|
||||
assert.deepStrictEqual(err, null);
|
||||
assert.strictEqual(data.IsTruncated, false);
|
||||
assert.strictEqual(data.Contents.length, 2);
|
||||
const expected = [
|
||||
{
|
||||
key: 'pfx2-test-object',
|
||||
LastModified: '1970-01-01T00:00:00.002Z',
|
||||
staleDate: '1970-01-01T00:00:00.003Z',
|
||||
},
|
||||
{
|
||||
key: 'pfx2-test-object',
|
||||
LastModified: '1970-01-01T00:00:00.001Z',
|
||||
staleDate: '1970-01-01T00:00:00.002Z',
|
||||
},
|
||||
];
|
||||
assertContents(data.Contents, expected);
|
||||
|
||||
params.keyMarker = data.NextKeyMarker;
|
||||
params.versionIdMarker = data.NextVersionIdMarker;
|
||||
|
||||
return next();
|
||||
}),
|
||||
], done);
|
||||
});
|
||||
|
||||
it('Should return truncated list of non-current versions after pfx1-test-object key marker', done => {
|
||||
const params = {
|
||||
listingType: 'DelimiterNonCurrent',
|
||||
maxKeys: 4,
|
||||
keyMarker: 'pfx1-test-object',
|
||||
};
|
||||
|
||||
return metadata.listLifecycleObject(BUCKET_NAME, params, logger, (err, data) => {
|
||||
assert.deepStrictEqual(err, null);
|
||||
assert.strictEqual(data.IsTruncated, true);
|
||||
assert.strictEqual(data.NextKeyMarker, 'pfx2-test-object');
|
||||
assert.strictEqual(data.NextVersionIdMarker, data.Contents[3].value.VersionId);
|
||||
assert.strictEqual(data.Contents.length, 4);
|
||||
const expected = [
|
||||
{
|
||||
key: 'pfx2-test-object',
|
||||
LastModified: '1970-01-01T00:00:00.004Z',
|
||||
staleDate: '1970-01-01T00:00:00.005Z',
|
||||
},
|
||||
{
|
||||
key: 'pfx2-test-object',
|
||||
LastModified: '1970-01-01T00:00:00.003Z',
|
||||
staleDate: '1970-01-01T00:00:00.004Z',
|
||||
},
|
||||
{
|
||||
key: 'pfx2-test-object',
|
||||
LastModified: '1970-01-01T00:00:00.002Z',
|
||||
staleDate: '1970-01-01T00:00:00.003Z',
|
||||
},
|
||||
{
|
||||
key: 'pfx2-test-object',
|
||||
LastModified: '1970-01-01T00:00:00.001Z',
|
||||
staleDate: '1970-01-01T00:00:00.002Z',
|
||||
},
|
||||
];
|
||||
assertContents(data.Contents, expected);
|
||||
|
||||
return done();
|
||||
});
|
||||
});
|
||||
|
||||
it('Should list non-current versions that start with prefix', done => {
|
||||
const params = {
|
||||
listingType: 'DelimiterNonCurrent',
|
||||
prefix: 'pfx2',
|
||||
};
|
||||
return metadata.listLifecycleObject(BUCKET_NAME, params, logger, (err, data) => {
|
||||
assert.deepStrictEqual(err, null);
|
||||
assert.strictEqual(data.IsTruncated, false);
|
||||
assert.strictEqual(data.Contents.length, 4);
|
||||
const expected = [{
|
||||
key: 'pfx2-test-object',
|
||||
LastModified: '1970-01-01T00:00:00.004Z',
|
||||
staleDate: '1970-01-01T00:00:00.005Z',
|
||||
},
|
||||
{
|
||||
key: 'pfx2-test-object',
|
||||
LastModified: '1970-01-01T00:00:00.003Z',
|
||||
staleDate: '1970-01-01T00:00:00.004Z',
|
||||
},
|
||||
{
|
||||
key: 'pfx2-test-object',
|
||||
LastModified: '1970-01-01T00:00:00.002Z',
|
||||
staleDate: '1970-01-01T00:00:00.003Z',
|
||||
},
|
||||
{
|
||||
key: 'pfx2-test-object',
|
||||
LastModified: '1970-01-01T00:00:00.001Z',
|
||||
staleDate: '1970-01-01T00:00:00.002Z',
|
||||
}];
|
||||
assertContents(data.Contents, expected);
|
||||
|
||||
return done();
|
||||
});
|
||||
});
|
||||
|
||||
it('Should list non-current versions that start with prefix and are older than beforeDate', done => {
|
||||
const params = {
|
||||
listingType: 'DelimiterNonCurrent',
|
||||
prefix: 'pfx2',
|
||||
maxKeys: 1,
|
||||
beforeDate: '1970-01-01T00:00:00.003Z',
|
||||
};
|
||||
return metadata.listLifecycleObject(BUCKET_NAME, params, logger, (err, data) => {
|
||||
assert.deepStrictEqual(err, null);
|
||||
assert.strictEqual(data.IsTruncated, false);
|
||||
assert.strictEqual(data.Contents.length, 1);
|
||||
const expected = [{
|
||||
key: 'pfx2-test-object',
|
||||
LastModified: '1970-01-01T00:00:00.001Z',
|
||||
staleDate: '1970-01-01T00:00:00.002Z',
|
||||
}];
|
||||
assertContents(data.Contents, expected);
|
||||
|
||||
return done();
|
||||
});
|
||||
});
|
||||
|
||||
it('Should truncate list of non-current versions that start with prefix and are older than beforeDate', done => {
|
||||
const params = {
|
||||
listingType: 'DelimiterNonCurrent',
|
||||
prefix: 'pfx2',
|
||||
maxKeys: 2,
|
||||
beforeDate: '1970-01-01T00:00:00.005Z',
|
||||
};
|
||||
return metadata.listLifecycleObject(BUCKET_NAME, params, logger, (err, data) => {
|
||||
assert.deepStrictEqual(err, null);
|
||||
assert.strictEqual(data.IsTruncated, true);
|
||||
assert.strictEqual(data.Contents.length, 2);
|
||||
assert.strictEqual(data.NextKeyMarker, 'pfx2-test-object');
|
||||
assert.strictEqual(data.NextVersionIdMarker, data.Contents[1].value.VersionId);
|
||||
const expected = [
|
||||
{
|
||||
key: 'pfx2-test-object',
|
||||
LastModified: '1970-01-01T00:00:00.003Z',
|
||||
staleDate: '1970-01-01T00:00:00.004Z',
|
||||
},
|
||||
{
|
||||
key: 'pfx2-test-object',
|
||||
LastModified: '1970-01-01T00:00:00.002Z',
|
||||
staleDate: '1970-01-01T00:00:00.003Z',
|
||||
},
|
||||
];
|
||||
assertContents(data.Contents, expected);
|
||||
|
||||
return done();
|
||||
});
|
||||
});
|
||||
|
||||
it('Should return InternalError when the bucket does not exist (non-current)', done => {
|
||||
const bucketName = 'non-existent-bucket';
|
||||
const params = {
|
||||
listingType: 'DelimiterNonCurrent',
|
||||
};
|
||||
return metadata.listLifecycleObject(bucketName, params, logger, (err, data) => {
|
||||
expect(err.InternalError).toBe(true);
|
||||
assert(!data);
|
||||
return done();
|
||||
});
|
||||
});
|
||||
|
||||
it('Should not take the PHD master key into account when listing non-current versions', done => {
|
||||
const objVal = {
|
||||
'key': 'pfx4-test-object',
|
||||
'versionId': 'null',
|
||||
'last-modified': new Date(10000).toISOString(),
|
||||
};
|
||||
const versionParams = {
|
||||
versioning: true,
|
||||
};
|
||||
const params = {
|
||||
listingType: 'DelimiterNonCurrent',
|
||||
prefix: 'pfx4',
|
||||
};
|
||||
let earlyVersionId;
|
||||
let lastVersionId;
|
||||
async.series([
|
||||
next => metadata.putObjectMD(BUCKET_NAME, 'pfx4-test-object', objVal, versionParams,
|
||||
logger, (err, res) => {
|
||||
if (err) {
|
||||
return next(err);
|
||||
}
|
||||
earlyVersionId = JSON.parse(res).versionId;
|
||||
return next(null);
|
||||
}),
|
||||
next => metadata.putObjectMD(BUCKET_NAME, 'pfx4-test-object', objVal, versionParams,
|
||||
logger, next),
|
||||
next => metadata.putObjectMD(BUCKET_NAME, 'pfx4-test-object', objVal, versionParams,
|
||||
logger, (err, res) => {
|
||||
if (err) {
|
||||
return next(err);
|
||||
}
|
||||
lastVersionId = JSON.parse(res).versionId;
|
||||
return next(null);
|
||||
}),
|
||||
next => metadata.deleteObjectMD(BUCKET_NAME, 'pfx4-test-object', { versionId: lastVersionId },
|
||||
logger, next),
|
||||
next => metadata.listLifecycleObject(BUCKET_NAME, params, logger, (err, data) => {
|
||||
assert.ifError(err);
|
||||
assert.strictEqual(data.Contents[0].value.VersionId, earlyVersionId);
|
||||
return next();
|
||||
}),
|
||||
], done);
|
||||
});
|
||||
|
||||
it('Should not take objects flagged for deletion into account when listing non-current versions', done => {
|
||||
const objVal = {
|
||||
'key': 'pfx4-test-object',
|
||||
'versionId': 'null',
|
||||
'last-modified': new Date(10000).toISOString(),
|
||||
};
|
||||
const versionParams = {
|
||||
versioning: true,
|
||||
};
|
||||
const params = {
|
||||
listingType: 'DelimiterNonCurrent',
|
||||
prefix: 'pfx4',
|
||||
};
|
||||
|
||||
async.series([
|
||||
next => metadata.putObjectMD(BUCKET_NAME, 'pfx4-test-object', objVal, versionParams,
|
||||
logger, next),
|
||||
next => metadata.putObjectMD(BUCKET_NAME, 'pfx4-test-object', objVal, versionParams,
|
||||
logger, next),
|
||||
next => flagObjectForDeletion(collection, 'pfx4-test-object', next),
|
||||
next => metadata.listLifecycleObject(BUCKET_NAME, params, logger, (err, data) => {
|
||||
assert.ifError(err);
|
||||
assert.strictEqual(data.IsTruncated, false);
|
||||
assert.strictEqual(data.Contents.length, 0);
|
||||
return next();
|
||||
}),
|
||||
], done);
|
||||
});
|
||||
});
|
|
@ -0,0 +1,458 @@
|
|||
const async = require('async');
|
||||
const assert = require('assert');
|
||||
const werelogs = require('werelogs');
|
||||
const { MongoMemoryReplSet } = require('mongodb-memory-server');
|
||||
const logger = new werelogs.Logger('MongoClientInterface', 'debug', 'debug');
|
||||
const MetadataWrapper =
|
||||
require('../../../../../lib/storage/metadata/MetadataWrapper');
|
||||
const { versioning } = require('../../../../../index');
|
||||
const { BucketVersioningKeyFormat } = versioning.VersioningConstants;
|
||||
const { makeBucketMD, putBulkObjectVersions } = require('./utils');
|
||||
|
||||
const IMPL_NAME = 'mongodb';
|
||||
const DB_NAME = 'metadata';
|
||||
const BUCKET_NAME = 'test-lifecycle-list-orphan-bucket';
|
||||
|
||||
const mongoserver = new MongoMemoryReplSet({
|
||||
debug: false,
|
||||
instanceOpts: [
|
||||
{ port: 27020 },
|
||||
],
|
||||
replSet: {
|
||||
name: 'rs0',
|
||||
count: 1,
|
||||
DB_NAME,
|
||||
storageEngine: 'ephemeralForTest',
|
||||
},
|
||||
});
|
||||
|
||||
describe('MongoClientInterface::metadata.listLifecycleObject::orphan', () => {
|
||||
let metadata;
|
||||
|
||||
beforeAll(done => {
|
||||
mongoserver.waitUntilRunning().then(() => {
|
||||
const opts = {
|
||||
mongodb: {
|
||||
replicaSetHosts: 'localhost:27020',
|
||||
writeConcern: 'majority',
|
||||
replicaSet: 'rs0',
|
||||
readPreference: 'primary',
|
||||
database: DB_NAME,
|
||||
},
|
||||
};
|
||||
metadata = new MetadataWrapper(IMPL_NAME, opts, null, logger);
|
||||
metadata.client.defaultBucketKeyFormat = BucketVersioningKeyFormat.v1;
|
||||
metadata.setup(done);
|
||||
});
|
||||
});
|
||||
|
||||
afterAll(done => {
|
||||
async.series([
|
||||
next => metadata.close(next),
|
||||
next => mongoserver.stop()
|
||||
.then(() => next())
|
||||
.catch(next),
|
||||
], done);
|
||||
});
|
||||
|
||||
beforeEach(done => {
|
||||
const bucketMD = makeBucketMD(BUCKET_NAME);
|
||||
const versionParams = {
|
||||
versioning: true,
|
||||
versionId: null,
|
||||
repairMaster: null,
|
||||
};
|
||||
async.series([
|
||||
next => metadata.createBucket(BUCKET_NAME, bucketMD, logger, next),
|
||||
next => {
|
||||
const keyName = 'pfx0-test-object';
|
||||
|
||||
const objVal = {
|
||||
'key': keyName,
|
||||
'isDeleteMarker': true,
|
||||
'last-modified': new Date(0).toISOString(), // 1970-01-01T00:00:00.000Z
|
||||
};
|
||||
const params = {
|
||||
versioning: true,
|
||||
};
|
||||
return metadata.putObjectMD(BUCKET_NAME, keyName, objVal, params, logger, next);
|
||||
},
|
||||
next => {
|
||||
const params = {
|
||||
objName: 'pfx1-test-object',
|
||||
objVal: {
|
||||
key: 'pfx1-test-object',
|
||||
versionId: 'null',
|
||||
},
|
||||
nbVersions: 1,
|
||||
};
|
||||
const timestamp = 0;
|
||||
putBulkObjectVersions(metadata, BUCKET_NAME, params.objName, params.objVal, versionParams,
|
||||
params.nbVersions, timestamp, logger, next);
|
||||
},
|
||||
next => {
|
||||
const params = {
|
||||
objName: 'pfx2-test-object',
|
||||
objVal: {
|
||||
key: 'pfx2-test-object',
|
||||
versionId: 'null',
|
||||
},
|
||||
nbVersions: 1,
|
||||
};
|
||||
const timestamp = 0;
|
||||
putBulkObjectVersions(metadata, BUCKET_NAME, params.objName, params.objVal, versionParams,
|
||||
params.nbVersions, timestamp, logger, next);
|
||||
},
|
||||
next => {
|
||||
const keyName = 'pfx2-test-object';
|
||||
|
||||
const objVal = {
|
||||
'key': keyName,
|
||||
'isDeleteMarker': true,
|
||||
'last-modified': new Date(2).toISOString(), // 1970-01-01T00:00:00.002Z
|
||||
};
|
||||
const params = {
|
||||
versioning: true,
|
||||
};
|
||||
return metadata.putObjectMD(BUCKET_NAME, keyName, objVal, params, logger, next);
|
||||
},
|
||||
next => {
|
||||
const keyName = 'pfx3-test-object';
|
||||
|
||||
const objVal = {
|
||||
'key': keyName,
|
||||
'isDeleteMarker': true,
|
||||
'last-modified': new Date(0).toISOString(), // 1970-01-01T00:00:00.000Z
|
||||
};
|
||||
const params = {
|
||||
versioning: true,
|
||||
};
|
||||
return metadata.putObjectMD(BUCKET_NAME, keyName, objVal, params, logger, next);
|
||||
},
|
||||
next => {
|
||||
const keyName = 'pfx4-test-object';
|
||||
|
||||
const objVal = {
|
||||
'key': keyName,
|
||||
'isDeleteMarker': true,
|
||||
'last-modified': new Date(5).toISOString(), // 1970-01-01T00:00:00.005Z
|
||||
};
|
||||
const params = {
|
||||
versioning: true,
|
||||
};
|
||||
return metadata.putObjectMD(BUCKET_NAME, keyName, objVal, params, logger, next);
|
||||
},
|
||||
next => {
|
||||
const keyName = 'pfx4-test-object2';
|
||||
|
||||
const objVal = {
|
||||
'key': keyName,
|
||||
'isDeleteMarker': true,
|
||||
'last-modified': new Date(6).toISOString(), // 1970-01-01T00:00:00.006Z
|
||||
};
|
||||
const params = {
|
||||
versioning: true,
|
||||
};
|
||||
return metadata.putObjectMD(BUCKET_NAME, keyName, objVal, params, logger, next);
|
||||
},
|
||||
], done);
|
||||
});
|
||||
/* eslint-disable max-len */
|
||||
// { "_id" : "Mpfx1-test-object", "value" : { "key" : "pfx1-test-object", "versionId" : "v1", "last-modified" : "1970-01-01T00:00:00.001Z" } }
|
||||
// { "_id" : "Vpfx0-test-object{sep}v0", "value" : { "key" : "pfx0-test-object", "isDeleteMarker" : true, "last-modified" : "1970-01-01T00:00:00.000Z", "versionId" : "v0" } }
|
||||
// { "_id" : "Vpfx1-test-object{sep}v1", "value" : { "key" : "pfx1-test-object", "versionId" : "v1", "last-modified" : "1970-01-01T00:00:00.001Z" } }
|
||||
// { "_id" : "Vpfx2-test-object{sep}v3", "value" : { "key" : "pfx2-test-object", "isDeleteMarker" : true, "last-modified" : "1970-01-01T00:00:00.002Z", "versionId" : "v3" } }
|
||||
// { "_id" : "Vpfx2-test-object{sep}v2", "value" : { "key" : "pfx2-test-object", "versionId" : "v2", "last-modified" : "1970-01-01T00:00:00.001Z" } }
|
||||
// { "_id" : "Vpfx3-test-object{sep}v4", "value" : { "key" : "pfx3-test-object", "isDeleteMarker" : true, "last-modified" : "1970-01-01T00:00:00.000Z", "versionId" : "v4" } }
|
||||
// { "_id" : "Vpfx4-test-object{sep}v5", "value" : { "key" : "pfx4-test-object", "isDeleteMarker" : true, "last-modified" : "1970-01-01T00:00:00.005Z", "versionId" : "v5" } }
|
||||
// { "_id" : "Vpfx4-test-object2{sep}v6", "value" : { "key" : "pfx4-test-object", "isDeleteMarker" : true, "last-modified" : "1970-01-01T00:00:00.006Z", "versionId" : "v6" } }
|
||||
/* eslint-enable max-len */
|
||||
|
||||
afterEach(done => {
|
||||
metadata.deleteBucket(BUCKET_NAME, logger, done);
|
||||
});
|
||||
|
||||
it('Should list orphan delete markers', done => {
|
||||
const params = {
|
||||
listingType: 'DelimiterOrphan',
|
||||
};
|
||||
return metadata.listLifecycleObject(BUCKET_NAME, params, logger, (err, data) => {
|
||||
assert.deepStrictEqual(err, null);
|
||||
assert.strictEqual(data.IsTruncated, false);
|
||||
assert(!data.NextKeyMarker);
|
||||
assert.strictEqual(data.Contents.length, 4);
|
||||
assert.strictEqual(data.Contents[0].key, 'pfx0-test-object');
|
||||
assert.strictEqual(data.Contents[1].key, 'pfx3-test-object');
|
||||
assert.strictEqual(data.Contents[2].key, 'pfx4-test-object');
|
||||
assert.strictEqual(data.Contents[3].key, 'pfx4-test-object2');
|
||||
return done();
|
||||
});
|
||||
});
|
||||
|
||||
it('Should return an empty list when beforeDate is before the youngest last-modified date', done => {
|
||||
const params = {
|
||||
listingType: 'DelimiterOrphan',
|
||||
beforeDate: '1970-01-01T00:00:00.000Z',
|
||||
};
|
||||
return metadata.listLifecycleObject(BUCKET_NAME, params, logger, (err, data) => {
|
||||
assert.deepStrictEqual(err, null);
|
||||
assert.strictEqual(data.IsTruncated, false);
|
||||
assert(!data.NextKeyMarker);
|
||||
assert.strictEqual(data.Contents.length, 0);
|
||||
|
||||
return done();
|
||||
});
|
||||
});
|
||||
|
||||
it('Should list orphan delete markers older than 1970-01-01T00:00:00.003Z', done => {
|
||||
const params = {
|
||||
listingType: 'DelimiterOrphan',
|
||||
beforeDate: '1970-01-01T00:00:00.003Z',
|
||||
};
|
||||
return metadata.listLifecycleObject(BUCKET_NAME, params, logger, (err, data) => {
|
||||
assert.deepStrictEqual(err, null);
|
||||
assert.strictEqual(data.IsTruncated, false);
|
||||
assert(!data.NextKeyMarker);
|
||||
assert.strictEqual(data.Contents.length, 2);
|
||||
assert.strictEqual(data.Contents[0].key, 'pfx0-test-object');
|
||||
assert.strictEqual(data.Contents[1].key, 'pfx3-test-object');
|
||||
|
||||
return done();
|
||||
});
|
||||
});
|
||||
|
||||
it('Should return the first part of the orphan delete markers listing', done => {
|
||||
const params = {
|
||||
listingType: 'DelimiterOrphan',
|
||||
maxKeys: 1,
|
||||
};
|
||||
return metadata.listLifecycleObject(BUCKET_NAME, params, logger, (err, data) => {
|
||||
assert.deepStrictEqual(err, null);
|
||||
assert.strictEqual(data.IsTruncated, true);
|
||||
assert.strictEqual(data.NextKeyMarker, 'pfx0-test-object');
|
||||
assert.strictEqual(data.Contents.length, 1);
|
||||
assert.strictEqual(data.Contents[0].key, 'pfx0-test-object');
|
||||
|
||||
return done();
|
||||
});
|
||||
});
|
||||
|
||||
it('Should return the second part of the orphan delete markers listing', done => {
|
||||
const params = {
|
||||
listingType: 'DelimiterOrphan',
|
||||
keyMarker: 'pfx0-test-object',
|
||||
maxKeys: 1,
|
||||
};
|
||||
return metadata.listLifecycleObject(BUCKET_NAME, params, logger, (err, data) => {
|
||||
assert.deepStrictEqual(err, null);
|
||||
assert.strictEqual(data.IsTruncated, true);
|
||||
assert.strictEqual(data.NextKeyMarker, 'pfx3-test-object');
|
||||
assert.strictEqual(data.Contents.length, 1);
|
||||
assert.strictEqual(data.Contents[0].key, 'pfx3-test-object');
|
||||
|
||||
return done();
|
||||
});
|
||||
});
|
||||
|
||||
it('Should return the third part of the orphan delete markers listing', done => {
|
||||
const params = {
|
||||
listingType: 'DelimiterOrphan',
|
||||
keyMarker: 'pfx3-test-object',
|
||||
maxKeys: 1,
|
||||
};
|
||||
return metadata.listLifecycleObject(BUCKET_NAME, params, logger, (err, data) => {
|
||||
assert.deepStrictEqual(err, null);
|
||||
assert.strictEqual(data.IsTruncated, true);
|
||||
assert.strictEqual(data.NextKeyMarker, 'pfx4-test-object');
|
||||
assert.strictEqual(data.Contents.length, 1);
|
||||
assert.strictEqual(data.Contents[0].key, 'pfx4-test-object');
|
||||
|
||||
return done();
|
||||
});
|
||||
});
|
||||
|
||||
it('Should return the fourth part of the orphan delete markers listing', done => {
|
||||
const params = {
|
||||
listingType: 'DelimiterOrphan',
|
||||
keyMarker: 'pfx4-test-object',
|
||||
maxKeys: 1,
|
||||
};
|
||||
return metadata.listLifecycleObject(BUCKET_NAME, params, logger, (err, data) => {
|
||||
assert.deepStrictEqual(err, null);
|
||||
assert.strictEqual(data.IsTruncated, false);
|
||||
assert(!data.NextKeyMarker);
|
||||
assert.strictEqual(data.Contents.length, 1);
|
||||
assert.strictEqual(data.Contents[0].key, 'pfx4-test-object2');
|
||||
|
||||
return done();
|
||||
});
|
||||
});
|
||||
|
||||
it('Should list the two first orphan delete markers', done => {
|
||||
const params = {
|
||||
listingType: 'DelimiterOrphan',
|
||||
maxKeys: 2,
|
||||
};
|
||||
return metadata.listLifecycleObject(BUCKET_NAME, params, logger, (err, data) => {
|
||||
assert.deepStrictEqual(err, null);
|
||||
assert.strictEqual(data.IsTruncated, true);
|
||||
assert.strictEqual(data.Contents.length, 2);
|
||||
assert.strictEqual(data.NextKeyMarker, 'pfx3-test-object');
|
||||
assert.strictEqual(data.Contents[0].key, 'pfx0-test-object');
|
||||
assert.strictEqual(data.Contents[1].key, 'pfx3-test-object');
|
||||
|
||||
return done();
|
||||
});
|
||||
});
|
||||
|
||||
it('Should list the four first orphan delete markers', done => {
|
||||
const params = {
|
||||
listingType: 'DelimiterOrphan',
|
||||
maxKeys: 4,
|
||||
};
|
||||
return metadata.listLifecycleObject(BUCKET_NAME, params, logger, (err, data) => {
|
||||
assert.deepStrictEqual(err, null);
|
||||
assert.strictEqual(data.IsTruncated, false);
|
||||
assert(!data.NextKeyMarker);
|
||||
assert.strictEqual(data.Contents.length, 4);
|
||||
assert.strictEqual(data.Contents[0].key, 'pfx0-test-object');
|
||||
assert.strictEqual(data.Contents[1].key, 'pfx3-test-object');
|
||||
assert.strictEqual(data.Contents[2].key, 'pfx4-test-object');
|
||||
assert.strictEqual(data.Contents[3].key, 'pfx4-test-object2');
|
||||
|
||||
return done();
|
||||
});
|
||||
});
|
||||
|
||||
it('Should return an empty list if no orphan delete marker starts with prefix pfx2', done => {
|
||||
const params = {
|
||||
listingType: 'DelimiterOrphan',
|
||||
prefix: 'pfx2',
|
||||
};
|
||||
return metadata.listLifecycleObject(BUCKET_NAME, params, logger, (err, data) => {
|
||||
assert.deepStrictEqual(err, null);
|
||||
assert.strictEqual(data.IsTruncated, false);
|
||||
assert(!data.NextKeyMarker);
|
||||
assert.strictEqual(data.Contents.length, 0);
|
||||
|
||||
return done();
|
||||
});
|
||||
});
|
||||
|
||||
it('Should list orphan delete markers that start with prefix pfx4', done => {
|
||||
const params = {
|
||||
listingType: 'DelimiterOrphan',
|
||||
prefix: 'pfx4',
|
||||
};
|
||||
return metadata.listLifecycleObject(BUCKET_NAME, params, logger, (err, data) => {
|
||||
assert.deepStrictEqual(err, null);
|
||||
assert.strictEqual(data.IsTruncated, false);
|
||||
assert(!data.NextKeyMarker);
|
||||
assert.strictEqual(data.Contents.length, 2);
|
||||
assert.strictEqual(data.Contents[0].key, 'pfx4-test-object');
|
||||
assert.strictEqual(data.Contents[1].key, 'pfx4-test-object2');
|
||||
|
||||
return done();
|
||||
});
|
||||
});
|
||||
|
||||
it('Should return the first orphan delete marker version that starts with prefix', done => {
|
||||
const params = {
|
||||
listingType: 'DelimiterOrphan',
|
||||
prefix: 'pfx4',
|
||||
maxKeys: 1,
|
||||
};
|
||||
return metadata.listLifecycleObject(BUCKET_NAME, params, logger, (err, data) => {
|
||||
assert.deepStrictEqual(err, null);
|
||||
assert.strictEqual(data.IsTruncated, true);
|
||||
assert.strictEqual(data.Contents.length, 1);
|
||||
assert.strictEqual(data.NextKeyMarker, 'pfx4-test-object');
|
||||
assert.strictEqual(data.Contents[0].key, 'pfx4-test-object');
|
||||
|
||||
return done();
|
||||
});
|
||||
});
|
||||
|
||||
it('Should return the following orphan delete marker version that starts with prefix', done => {
|
||||
const params = {
|
||||
listingType: 'DelimiterOrphan',
|
||||
keyMarker: 'pfx4-test-object',
|
||||
prefix: 'pfx4',
|
||||
maxKeys: 1,
|
||||
};
|
||||
return metadata.listLifecycleObject(BUCKET_NAME, params, logger, (err, data) => {
|
||||
assert.deepStrictEqual(err, null);
|
||||
assert.strictEqual(data.IsTruncated, false);
|
||||
assert(!data.NextKeyMarker);
|
||||
assert.strictEqual(data.Contents.length, 1);
|
||||
assert.strictEqual(data.Contents[0].key, 'pfx4-test-object2');
|
||||
|
||||
return done();
|
||||
});
|
||||
});
|
||||
|
||||
it('Should return the truncated list of orphan delete markers older than 1970-01-01T00:00:00.006Z', done => {
|
||||
const params = {
|
||||
listingType: 'DelimiterOrphan',
|
||||
maxKeys: 2,
|
||||
beforeDate: '1970-01-01T00:00:00.006Z',
|
||||
};
|
||||
return metadata.listLifecycleObject(BUCKET_NAME, params, logger, (err, data) => {
|
||||
assert.deepStrictEqual(err, null);
|
||||
assert.strictEqual(data.IsTruncated, true);
|
||||
assert.strictEqual(data.Contents.length, 2);
|
||||
assert.strictEqual(data.Contents[0].key, 'pfx0-test-object');
|
||||
assert.strictEqual(data.Contents[1].key, 'pfx3-test-object');
|
||||
assert.strictEqual(data.NextKeyMarker, 'pfx3-test-object');
|
||||
|
||||
return done();
|
||||
});
|
||||
});
|
||||
|
||||
it('Should return the following list of orphan delete markers older than 1970-01-01T00:00:00.006Z', done => {
|
||||
const params = {
|
||||
listingType: 'DelimiterOrphan',
|
||||
maxKeys: 2,
|
||||
beforeDate: '1970-01-01T00:00:00.006Z',
|
||||
keyMarker: 'pfx3-test-object',
|
||||
};
|
||||
return metadata.listLifecycleObject(BUCKET_NAME, params, logger, (err, data) => {
|
||||
assert.deepStrictEqual(err, null);
|
||||
assert.strictEqual(data.IsTruncated, false);
|
||||
assert.strictEqual(data.Contents.length, 1);
|
||||
assert.strictEqual(data.Contents[0].key, 'pfx4-test-object');
|
||||
|
||||
return done();
|
||||
});
|
||||
});
|
||||
|
||||
it('Should return the truncated list of orphan delete markers older than 1970-01-01T00:00:00.001Z', done => {
|
||||
const params = {
|
||||
listingType: 'DelimiterOrphan',
|
||||
maxKeys: 2,
|
||||
beforeDate: '1970-01-01T00:00:00.001Z',
|
||||
};
|
||||
return metadata.listLifecycleObject(BUCKET_NAME, params, logger, (err, data) => {
|
||||
assert.deepStrictEqual(err, null);
|
||||
assert.strictEqual(data.IsTruncated, true);
|
||||
assert.strictEqual(data.Contents.length, 2);
|
||||
assert.strictEqual(data.Contents[0].key, 'pfx0-test-object');
|
||||
assert.strictEqual(data.Contents[1].key, 'pfx3-test-object');
|
||||
assert.strictEqual(data.NextKeyMarker, 'pfx3-test-object');
|
||||
|
||||
return done();
|
||||
});
|
||||
});
|
||||
|
||||
it('Should return InternalError when the bucket does not exist (orphan)', done => {
|
||||
const bucketName = 'non-existent-bucket';
|
||||
const params = {
|
||||
listingType: 'DelimiterOrphan',
|
||||
};
|
||||
return metadata.listLifecycleObject(bucketName, params, logger, (err, data) => {
|
||||
expect(err.InternalError).toBe(true);
|
||||
assert(!data);
|
||||
return done();
|
||||
});
|
||||
});
|
||||
});
|
|
@ -0,0 +1,88 @@
|
|||
const async = require('async');
|
||||
const BucketInfo = require('../../../../../lib/models/BucketInfo').default;
|
||||
const assert = require('assert');
|
||||
|
||||
/**
|
||||
* Puts multiple versions of an object
|
||||
* @param {Object} metadata - metadata client
|
||||
* @param {String} bucketName - bucket name
|
||||
* @param {String} objName - object key
|
||||
* @param {Object} objVal - object metadata
|
||||
* @param {Object} params - versioning parameters
|
||||
* @param {number} versionNb - number of versions to put
|
||||
* @param {number} timestamp - used for last-modified
|
||||
* @param {Object} logger - a Logger instance
|
||||
* @param {Function} cb - callback
|
||||
* @returns {undefined}
|
||||
*/
|
||||
function putBulkObjectVersions(metadata, bucketName, objName, objVal, params, versionNb, timestamp, logger, cb) {
|
||||
let count = 0;
|
||||
return async.whilst(
|
||||
() => count < versionNb,
|
||||
cbIterator => {
|
||||
count++;
|
||||
const lastModified = new Date(timestamp + count).toISOString();
|
||||
const finalObjectVal = Object.assign(objVal, { 'last-modified': lastModified });
|
||||
return metadata.putObjectMD(bucketName, objName, finalObjectVal, params,
|
||||
logger, cbIterator);
|
||||
}, cb);
|
||||
}
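// Illustrative usage only (not part of the original helpers): this mirrors how the
// listLifecycleObject tests call the helper, assuming `metadata` and `logger` are
// already set up as in those tests.
//
//   putBulkObjectVersions(metadata, 'test-bucket', 'pfx1-test-object',
//       { key: 'pfx1-test-object', versionId: 'null' },   // base object metadata
//       { versioning: true },                              // versioning parameters
//       5,                                                 // number of versions to put
//       0,                                                 // timestamp: last-modified runs .001Z to .005Z
//       logger,
//       err => { /* all five versions have been written */ });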
|
||||
|
||||
function makeBucketMD(bucketName) {
|
||||
return BucketInfo.fromObj({
|
||||
_name: bucketName,
|
||||
_owner: 'testowner',
|
||||
_ownerDisplayName: 'testdisplayname',
|
||||
_creationDate: new Date().toJSON(),
|
||||
_acl: {
|
||||
Canned: 'private',
|
||||
FULL_CONTROL: [],
|
||||
WRITE: [],
|
||||
WRITE_ACP: [],
|
||||
READ: [],
|
||||
READ_ACP: [],
|
||||
},
|
||||
_mdBucketModelVersion: 10,
|
||||
_transient: false,
|
||||
_deleted: false,
|
||||
_serverSideEncryption: null,
|
||||
_versioningConfiguration: null,
|
||||
_locationConstraint: 'us-east-1',
|
||||
_readLocationConstraint: null,
|
||||
_cors: null,
|
||||
_replicationConfiguration: null,
|
||||
_lifecycleConfiguration: null,
|
||||
_uid: '',
|
||||
_isNFS: null,
|
||||
ingestion: null,
|
||||
});
|
||||
}
|
||||
|
||||
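// Compares listing contents against the expected entries: only `key`,
// `value.LastModified` and `value.staleDate` are checked; other metadata
// fields returned by the listing are ignored.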
function assertContents(contents, expected) {
|
||||
contents.forEach((c, i) => {
|
||||
assert.strictEqual(c.key, expected[i].key);
|
||||
assert.strictEqual(c.value.LastModified, expected[i].LastModified);
|
||||
assert.strictEqual(c.value.staleDate, expected[i].staleDate);
|
||||
});
|
||||
}
|
||||
|
||||
/**
|
||||
* Sets the "deleted" property to true
|
||||
* @param {Object} collection - collection to be updated
|
||||
* @param {string} key - object name
|
||||
* @param {Function} cb - callback
|
||||
* @return {undefined}
|
||||
*/
|
||||
function flagObjectForDeletion(collection, key, cb) {
|
||||
collection.updateMany(
|
||||
{ 'value.key': key },
|
||||
{ $set: { 'value.deleted': true } },
|
||||
{ upsert: false }, cb);
|
||||
}
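// Illustrative usage only: the MongoDB-backed tests call this with the raw
// collection handle of the bucket under test, e.g.
//
//   flagObjectForDeletion(collection, 'pfx4-test-object',
//       err => { /* every stored version of the key is now flagged as deleted */ });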
|
||||
|
||||
module.exports = {
|
||||
putBulkObjectVersions,
|
||||
makeBucketMD,
|
||||
assertContents,
|
||||
flagObjectForDeletion,
|
||||
};
|
|
@ -0,0 +1,128 @@
|
|||
'use strict'; // eslint-disable-line strict
|
||||
|
||||
const assert = require('assert');
|
||||
|
||||
const DelimiterCurrent =
|
||||
require('../../../../lib/algos/list/delimiterCurrent').DelimiterCurrent;
|
||||
const {
|
||||
FILTER_ACCEPT,
|
||||
FILTER_SKIP,
|
||||
FILTER_END,
|
||||
} = require('../../../../lib/algos/list/tools');
|
||||
const VSConst =
|
||||
require('../../../../lib/versioning/constants').VersioningConstants;
|
||||
const { DbPrefixes } = VSConst;
|
||||
|
||||
const VID_SEP = VSConst.VersionId.Separator;
|
||||
const EmptyResult = {
|
||||
Contents: [],
|
||||
IsTruncated: false,
|
||||
};
|
||||
|
||||
const fakeLogger = {
|
||||
trace: () => {},
|
||||
debug: () => {},
|
||||
info: () => {},
|
||||
warn: () => {},
|
||||
error: () => {},
|
||||
fatal: () => {},
|
||||
};
|
||||
|
||||
function makeV1Key(key) {
|
||||
const keyPrefix = key.includes(VID_SEP) ?
|
||||
DbPrefixes.Version : DbPrefixes.Master;
|
||||
return `${keyPrefix}${key}`;
|
||||
}
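// For example, makeV1Key('key') yields `${DbPrefixes.Master}key` (a master/current
// entry), while makeV1Key(`key${VID_SEP}versionId`) yields
// `${DbPrefixes.Version}key${VID_SEP}versionId` (a version entry).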
|
||||
|
||||
describe('DelimiterCurrent', () => {
|
||||
it('should accept entry starting with prefix', () => {
|
||||
const delimiter = new DelimiterCurrent({ prefix: 'prefix' }, fakeLogger, 'v1');
|
||||
|
||||
const masterKey = 'prefix1';
|
||||
const date1 = '1970-01-01T00:00:00.001Z';
|
||||
const value1 = `{"last-modified": "${date1}"}`;
|
||||
assert.strictEqual(delimiter.filter({ key: makeV1Key(masterKey), value: value1 }), FILTER_ACCEPT);
|
||||
|
||||
const expectedResult = {
|
||||
Contents: [
|
||||
{
|
||||
key: masterKey,
|
||||
value: value1,
|
||||
},
|
||||
],
|
||||
IsTruncated: false,
|
||||
};
|
||||
|
||||
assert.deepStrictEqual(delimiter.result(), expectedResult);
|
||||
});
|
||||
|
||||
it('should skip entry not starting with prefix', () => {
|
||||
const delimiter = new DelimiterCurrent({ prefix: 'prefix' }, fakeLogger, 'v1');
|
||||
|
||||
const listingKey = makeV1Key('noprefix');
|
||||
assert.strictEqual(delimiter.filter({ key: listingKey, value: '' }), FILTER_SKIP);
|
||||
|
||||
assert.deepStrictEqual(delimiter.result(), EmptyResult);
|
||||
});
|
||||
|
||||
it('should accept a master and return it', () => {
|
||||
const delimiter = new DelimiterCurrent({ }, fakeLogger, 'v1');
|
||||
|
||||
const masterKey = 'key';
|
||||
|
||||
const date1 = '1970-01-01T00:00:00.001Z';
|
||||
const value1 = `{"last-modified": "${date1}"}`;
|
||||
|
||||
assert.strictEqual(delimiter.filter({
|
||||
key: makeV1Key(masterKey),
|
||||
value: value1,
|
||||
}), FILTER_ACCEPT);
|
||||
|
||||
const expectedResult = {
|
||||
Contents: [
|
||||
{
|
||||
key: masterKey,
|
||||
value: value1,
|
||||
},
|
||||
],
|
||||
IsTruncated: false,
|
||||
};
|
||||
|
||||
assert.deepStrictEqual(delimiter.result(), expectedResult);
|
||||
});
|
||||
|
||||
it('should accept two masters and return a truncated content', () => {
|
||||
const delimiter = new DelimiterCurrent({ maxKeys: 1 }, fakeLogger, 'v1');
|
||||
|
||||
const masterKey1 = 'key1';
|
||||
const date1 = '1970-01-01T00:00:00.001Z';
|
||||
const value1 = `{"last-modified": "${date1}"}`;
|
||||
|
||||
assert.strictEqual(delimiter.filter({
|
||||
key: makeV1Key(masterKey1),
|
||||
value: value1,
|
||||
}), FILTER_ACCEPT);
|
||||
|
||||
const masterKey2 = 'key2';
|
||||
const date2 = '1970-01-01T00:00:00.000Z';
|
||||
const value2 = `{"last-modified": "${date2}"}`;
|
||||
|
||||
assert.strictEqual(delimiter.filter({
|
||||
key: makeV1Key(masterKey2),
|
||||
value: value2,
|
||||
}), FILTER_END);
|
||||
|
||||
const expectedResult = {
|
||||
Contents: [
|
||||
{
|
||||
key: masterKey1,
|
||||
value: value1,
|
||||
},
|
||||
],
|
||||
NextKeyMarker: masterKey1,
|
||||
IsTruncated: true,
|
||||
};
|
||||
|
||||
assert.deepStrictEqual(delimiter.result(), expectedResult);
|
||||
});
|
||||
});
|
|
@ -0,0 +1,373 @@
|
|||
'use strict'; // eslint-disable-line strict
|
||||
|
||||
const assert = require('assert');
|
||||
|
||||
const DelimiterNonCurrent =
|
||||
require('../../../../lib/algos/list/delimiterNonCurrent').DelimiterNonCurrent;
|
||||
const {
|
||||
FILTER_ACCEPT,
|
||||
FILTER_SKIP,
|
||||
FILTER_END,
|
||||
} = require('../../../../lib/algos/list/tools');
|
||||
const VSConst =
|
||||
require('../../../../lib/versioning/constants').VersioningConstants;
|
||||
const { DbPrefixes } = VSConst;
|
||||
|
||||
// TODO: find an acceptable timeout value.
|
||||
const DELIMITER_TIMEOUT_MS = 10 * 1000; // 10s
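// The timeout tests below do not wait in real time: they rewind `delimiter.start`
// past DELIMITER_TIMEOUT_MS to simulate an expired listing window.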
|
||||
|
||||
|
||||
const VID_SEP = VSConst.VersionId.Separator;
|
||||
const EmptyResult = {
|
||||
Contents: [],
|
||||
IsTruncated: false,
|
||||
};
|
||||
|
||||
const fakeLogger = {
|
||||
trace: () => {},
|
||||
debug: () => {},
|
||||
info: () => {},
|
||||
warn: () => {},
|
||||
error: () => {},
|
||||
fatal: () => {},
|
||||
};
|
||||
|
||||
function makeV1Key(key) {
|
||||
const keyPrefix = key.includes(VID_SEP) ?
|
||||
DbPrefixes.Version : DbPrefixes.Master;
|
||||
return `${keyPrefix}${key}`;
|
||||
}
|
||||
|
||||
describe('DelimiterNonCurrent', () => {
|
||||
it('should accept entry starting with prefix', () => {
|
||||
const delimiter = new DelimiterNonCurrent({ prefix: 'prefix' }, fakeLogger, 'v1');
|
||||
|
||||
const listingKey = makeV1Key('prefix1');
|
||||
assert.strictEqual(delimiter.filter({ key: listingKey, value: '' }), FILTER_ACCEPT);
|
||||
|
||||
assert.deepStrictEqual(delimiter.result(), EmptyResult);
|
||||
});
|
||||
|
||||
it('should skip entry not starting with prefix', () => {
|
||||
const delimiter = new DelimiterNonCurrent({ prefix: 'prefix' }, fakeLogger, 'v1');
|
||||
|
||||
const listingKey = makeV1Key('noprefix');
|
||||
assert.strictEqual(delimiter.filter({ key: listingKey, value: '' }), FILTER_SKIP);
|
||||
|
||||
assert.deepStrictEqual(delimiter.result(), EmptyResult);
|
||||
});
|
||||
|
||||
it('should accept a version and return an empty content', () => {
|
||||
const delimiter = new DelimiterNonCurrent({ }, fakeLogger, 'v1');
|
||||
|
||||
const masterKey = 'key';
|
||||
|
||||
const versionId1 = 'version1';
|
||||
const versionKey1 = `${masterKey}${VID_SEP}${versionId1}`;
|
||||
const date1 = '1970-01-01T00:00:00.001Z';
|
||||
const value1 = `{"versionId":"${versionId1}", "last-modified": "${date1}"}`;
|
||||
|
||||
assert.strictEqual(delimiter.filter({
|
||||
key: makeV1Key(versionKey1),
|
||||
value: value1,
|
||||
}), FILTER_ACCEPT);
|
||||
|
||||
assert.deepStrictEqual(delimiter.result(), EmptyResult);
|
||||
});
|
||||
|
||||
it('should accept two versions and return the non-current version', () => {
|
||||
const delimiter = new DelimiterNonCurrent({ }, fakeLogger, 'v1');
|
||||
|
||||
const masterKey = 'key';
|
||||
|
||||
// filter first version
|
||||
const versionId1 = 'version1';
|
||||
const versionKey1 = `${masterKey}${VID_SEP}${versionId1}`;
|
||||
const date1 = '1970-01-01T00:00:00.002Z';
|
||||
const value1 = `{"versionId":"${versionId1}", "last-modified": "${date1}"}`;
|
||||
|
||||
assert.strictEqual(delimiter.filter({
|
||||
key: makeV1Key(versionKey1),
|
||||
value: value1,
|
||||
}), FILTER_ACCEPT);
|
||||
|
||||
// filter second version
|
||||
const versionId2 = 'version2';
|
||||
const versionKey2 = `${masterKey}${VID_SEP}${versionId2}`;
|
||||
const date2 = '1970-01-01T00:00:00.001Z';
|
||||
const value2 = `{"versionId":"${versionId2}", "last-modified": "${date2}"}`;
|
||||
|
||||
assert.strictEqual(delimiter.filter({
|
||||
key: makeV1Key(versionKey2),
|
||||
value: value2,
|
||||
}), FILTER_ACCEPT);
|
||||
|
||||
const expectedResult = {
|
||||
Contents: [
|
||||
{
|
||||
key: masterKey,
|
||||
value: `{"versionId":"${versionId2}","last-modified":"${date2}","staleDate":"${date1}"}`,
|
||||
},
|
||||
],
|
||||
IsTruncated: false,
|
||||
};
|
||||
|
||||
assert.deepStrictEqual(delimiter.result(), expectedResult);
|
||||
});
|
||||
|
||||
it('should accept three versions and return the non-current version whose stale date is before beforeDate', () => {
|
||||
const beforeDate = '1970-01-01T00:00:00.002Z';
|
||||
const delimiter = new DelimiterNonCurrent({ beforeDate }, fakeLogger, 'v1');
|
||||
|
||||
const masterKey = 'key';
|
||||
|
||||
// filter first version
|
||||
const versionId1 = 'version1';
|
||||
const versionKey1 = `${masterKey}${VID_SEP}${versionId1}`;
|
||||
const date1 = beforeDate;
|
||||
const value1 = `{"versionId":"${versionId1}", "last-modified": "${date1}"}`;
|
||||
|
||||
assert.strictEqual(delimiter.filter({
|
||||
key: makeV1Key(versionKey1),
|
||||
value: value1,
|
||||
}), FILTER_ACCEPT);
|
||||
|
||||
// filter second version
|
||||
const versionId2 = 'version2';
|
||||
const versionKey2 = `${masterKey}${VID_SEP}${versionId2}`;
|
||||
const date2 = '1970-01-01T00:00:00.001Z';
|
||||
const value2 = `{"versionId":"${versionId2}", "last-modified": "${date2}"}`;
|
||||
|
||||
assert.strictEqual(delimiter.filter({
|
||||
key: makeV1Key(versionKey2),
|
||||
value: value2,
|
||||
}), FILTER_ACCEPT);
|
||||
|
||||
// filter third version
|
||||
const versionId3 = 'version3';
|
||||
const versionKey3 = `${masterKey}${VID_SEP}${versionId3}`;
|
||||
const date3 = '1970-01-01T00:00:00.000Z';
|
||||
const value3 = `{"versionId":"${versionId3}", "last-modified": "${date3}"}`;
|
||||
|
||||
assert.strictEqual(delimiter.filter({
|
||||
key: makeV1Key(versionKey3),
|
||||
value: value3,
|
||||
}), FILTER_ACCEPT);
|
||||
|
||||
const expectedResult = {
|
||||
Contents: [
|
||||
{
|
||||
key: masterKey,
|
||||
value: `{"versionId":"${versionId3}","last-modified":"${date3}","staleDate":"${date2}"}`,
|
||||
},
|
||||
],
|
||||
IsTruncated: false,
|
||||
};
|
||||
|
||||
assert.deepStrictEqual(delimiter.result(), expectedResult);
|
||||
});
|
||||
|
||||
it('should accept one delete marker and one version and return the non-current version', () => {
|
||||
const delimiter = new DelimiterNonCurrent({ }, fakeLogger, 'v1');
|
||||
|
||||
|
||||
const masterKey = 'key';
|
||||
|
||||
// filter delete marker
|
||||
const versionId1 = 'version1';
|
||||
const versionKey1 = `${masterKey}${VID_SEP}${versionId1}`;
|
||||
const date1 = '1970-01-01T00:00:00.002Z';
|
||||
const value1 = `{"versionId":"${versionId1}", "last-modified": "${date1}", "isDeleteMarker": true}`;
|
||||
|
||||
assert.strictEqual(delimiter.filter({
|
||||
key: makeV1Key(versionKey1),
|
||||
value: value1,
|
||||
}), FILTER_ACCEPT);
|
||||
|
||||
// filter second version
|
||||
const versionId2 = 'version2';
|
||||
const versionKey2 = `${masterKey}${VID_SEP}${versionId2}`;
|
||||
const date2 = '1970-01-01T00:00:00.001Z';
|
||||
const value2 = `{"versionId":"${versionId2}", "last-modified": "${date2}"}`;
|
||||
|
||||
assert.strictEqual(delimiter.filter({
|
||||
key: makeV1Key(versionKey2),
|
||||
value: value2,
|
||||
}), FILTER_ACCEPT);
|
||||
|
||||
const expectedResult = {
|
||||
Contents: [
|
||||
{
|
||||
key: masterKey,
|
||||
value: `{"versionId":"${versionId2}","last-modified":"${date2}","staleDate":"${date1}"}`,
|
||||
},
|
||||
],
|
||||
IsTruncated: false,
|
||||
};
|
||||
|
||||
assert.deepStrictEqual(delimiter.result(), expectedResult);
|
||||
});
|
||||
|
||||
it('should end filtering if max keys reached', () => {
|
||||
const delimiter = new DelimiterNonCurrent({ maxKeys: 1 }, fakeLogger, 'v1');
|
||||
|
||||
const masterKey = 'key';
|
||||
|
||||
// filter delete marker
|
||||
const versionId1 = 'version1';
|
||||
const versionKey1 = `${masterKey}${VID_SEP}${versionId1}`;
|
||||
const date1 = '1970-01-01T00:00:00.002Z';
|
||||
const value1 = `{"versionId":"${versionId1}", "last-modified": "${date1}", "isDeleteMarker": true}`;
|
||||
|
||||
assert.strictEqual(delimiter.filter({
|
||||
key: makeV1Key(versionKey1),
|
||||
value: value1,
|
||||
}), FILTER_ACCEPT);
|
||||
|
||||
// filter second version
|
||||
const versionId2 = 'version2';
|
||||
const versionKey2 = `${masterKey}${VID_SEP}${versionId2}`;
|
||||
const date2 = '1970-01-01T00:00:00.001Z';
|
||||
const value2 = `{"versionId":"${versionId2}", "last-modified": "${date2}"}`;
|
||||
|
||||
assert.strictEqual(delimiter.filter({
|
||||
key: makeV1Key(versionKey2),
|
||||
value: value2,
|
||||
}), FILTER_ACCEPT);
|
||||
|
||||
// filter third version
|
||||
const versionId3 = 'version3';
|
||||
const versionKey3 = `${masterKey}${VID_SEP}${versionId3}`;
|
||||
const date3 = '1970-01-01T00:00:00.000Z';
|
||||
const value3 = `{"versionId":"${versionId3}", "last-modified": "${date3}"}`;
|
||||
|
||||
assert.strictEqual(delimiter.filter({
|
||||
key: makeV1Key(versionKey3),
|
||||
value: value3,
|
||||
}), FILTER_END);
|
||||
|
||||
|
||||
const expectedResult = {
|
||||
Contents: [
|
||||
{
|
||||
key: masterKey,
|
||||
value: `{"versionId":"${versionId2}","last-modified":"${date2}","staleDate":"${date1}"}`,
|
||||
},
|
||||
],
|
||||
IsTruncated: true,
|
||||
NextKeyMarker: masterKey,
|
||||
NextVersionIdMarker: versionId2,
|
||||
};
|
||||
|
||||
assert.deepStrictEqual(delimiter.result(), expectedResult);
|
||||
});
|
||||
|
||||
it('should end filtering if the delimiter times out', () => {
|
||||
const delimiter = new DelimiterNonCurrent({ }, fakeLogger, 'v1');
|
||||
|
||||
const masterKey = 'key';
|
||||
|
||||
// filter delete marker
|
||||
const versionId1 = 'version1';
|
||||
const versionKey1 = `${masterKey}${VID_SEP}${versionId1}`;
|
||||
const date1 = '1970-01-01T00:00:00.002Z';
|
||||
const value1 = `{"versionId":"${versionId1}", "last-modified": "${date1}", "isDeleteMarker": true}`;
|
||||
|
||||
assert.strictEqual(delimiter.filter({
|
||||
key: makeV1Key(versionKey1),
|
||||
value: value1,
|
||||
}), FILTER_ACCEPT);
|
||||
|
||||
// filter second version
|
||||
const versionId2 = 'version2';
|
||||
const versionKey2 = `${masterKey}${VID_SEP}${versionId2}`;
|
||||
const date2 = '1970-01-01T00:00:00.001Z';
|
||||
const value2 = `{"versionId":"${versionId2}", "last-modified": "${date2}"}`;
|
||||
|
||||
assert.strictEqual(delimiter.filter({
|
||||
key: makeV1Key(versionKey2),
|
||||
value: value2,
|
||||
}), FILTER_ACCEPT);
|
||||
|
||||
// force delimiter to timeout.
|
||||
delimiter.start = Date.now() - (DELIMITER_TIMEOUT_MS + 1);
|
||||
|
||||
// filter third version
|
||||
const versionId3 = 'version3';
|
||||
const versionKey3 = `${masterKey}${VID_SEP}${versionId3}`;
|
||||
const date3 = '1970-01-01T00:00:00.000Z';
|
||||
const value3 = `{"versionId":"${versionId3}", "last-modified": "${date3}"}`;
|
||||
|
||||
assert.strictEqual(delimiter.filter({
|
||||
key: makeV1Key(versionKey3),
|
||||
value: value3,
|
||||
}), FILTER_END);
|
||||
|
||||
|
||||
const expectedResult = {
|
||||
Contents: [
|
||||
{
|
||||
key: masterKey,
|
||||
value: `{"versionId":"${versionId2}","last-modified":"${date2}","staleDate":"${date1}"}`,
|
||||
},
|
||||
],
|
||||
IsTruncated: true,
|
||||
NextKeyMarker: masterKey,
|
||||
NextVersionIdMarker: versionId2,
|
||||
};
|
||||
|
||||
assert.deepStrictEqual(delimiter.result(), expectedResult);
|
||||
});
|
||||
|
||||
it('should end filtering with empty content if the delimiter times out', () => {
|
||||
const delimiter = new DelimiterNonCurrent({ }, fakeLogger, 'v1');
|
||||
|
||||
// filter current version
|
||||
const masterKey1 = 'key1';
|
||||
const versionId1 = 'version1';
|
||||
const versionKey1 = `${masterKey1}${VID_SEP}${versionId1}`;
|
||||
const date1 = '1970-01-01T00:00:00.002Z';
|
||||
const value1 = `{"versionId":"${versionId1}", "last-modified": "${date1}"`;
|
||||
|
||||
assert.strictEqual(delimiter.filter({
|
||||
key: makeV1Key(versionKey1),
|
||||
value: value1,
|
||||
}), FILTER_ACCEPT);
|
||||
|
||||
// filter current version
|
||||
const masterKey2 = 'key2';
|
||||
const versionId2 = 'version2';
|
||||
const versionKey2 = `${masterKey2}${VID_SEP}${versionId2}`;
|
||||
const date2 = '1970-01-01T00:00:00.001Z';
|
||||
const value2 = `{"versionId":"${versionId2}", "last-modified": "${date2}"}`;
|
||||
|
||||
assert.strictEqual(delimiter.filter({
|
||||
key: makeV1Key(versionKey2),
|
||||
value: value2,
|
||||
}), FILTER_ACCEPT);
|
||||
|
||||
// force delimiter to timeout.
|
||||
delimiter.start = Date.now() - (DELIMITER_TIMEOUT_MS + 1);
|
||||
|
||||
// filter current version
|
||||
const masterKey3 = 'key3';
|
||||
const versionId3 = 'version3';
|
||||
const versionKey3 = `${masterKey3}${VID_SEP}${versionId3}`;
|
||||
const date3 = '1970-01-01T00:00:00.000Z';
|
||||
const value3 = `{"versionId":"${versionId3}", "last-modified": "${date3}"}`;
|
||||
|
||||
assert.strictEqual(delimiter.filter({
|
||||
key: makeV1Key(versionKey3),
|
||||
value: value3,
|
||||
}), FILTER_END);
|
||||
|
||||
const expectedResult = {
|
||||
Contents: [],
|
||||
IsTruncated: true,
|
||||
NextKeyMarker: masterKey2,
|
||||
NextVersionIdMarker: versionId2,
|
||||
};
|
||||
|
||||
assert.deepStrictEqual(delimiter.result(), expectedResult);
|
||||
});
|
||||
});
|
|
@ -0,0 +1,378 @@
|
|||
'use strict'; // eslint-disable-line strict
|
||||
|
||||
const assert = require('assert');
|
||||
|
||||
const DelimiterOrphan =
|
||||
require('../../../../lib/algos/list/delimiterOrphan').DelimiterOrphan;
|
||||
const {
|
||||
FILTER_ACCEPT,
|
||||
FILTER_SKIP,
|
||||
FILTER_END,
|
||||
} = require('../../../../lib/algos/list/tools');
|
||||
const VSConst =
|
||||
require('../../../../lib/versioning/constants').VersioningConstants;
|
||||
const { DbPrefixes } = VSConst;
|
||||
|
||||
// TODO: find an acceptable timeout value.
|
||||
const DELIMITER_TIMEOUT_MS = 10 * 1000; // 10s
|
||||
|
||||
|
||||
const VID_SEP = VSConst.VersionId.Separator;
|
||||
const EmptyResult = {
|
||||
Contents: [],
|
||||
IsTruncated: false,
|
||||
};
|
||||
|
||||
const fakeLogger = {
|
||||
trace: () => {},
|
||||
debug: () => {},
|
||||
info: () => {},
|
||||
warn: () => {},
|
||||
error: () => {},
|
||||
fatal: () => {},
|
||||
};
|
||||
|
||||
function makeV1Key(key) {
|
||||
const keyPrefix = key.includes(VID_SEP) ?
|
||||
DbPrefixes.Version : DbPrefixes.Master;
|
||||
return `${keyPrefix}${key}`;
|
||||
}
|
||||
|
||||
describe('DelimiterOrphan', () => {
|
||||
it('should accept entry starting with prefix', () => {
|
||||
const delimiter = new DelimiterOrphan({ prefix: 'prefix' }, fakeLogger, 'v1');
|
||||
|
||||
const listingKey = makeV1Key('prefix1');
|
||||
assert.strictEqual(delimiter.filter({ key: listingKey, value: '' }), FILTER_ACCEPT);
|
||||
|
||||
assert.deepStrictEqual(delimiter.result(), EmptyResult);
|
||||
});
|
||||
|
||||
it('should skip entry not starting with prefix', () => {
|
||||
const delimiter = new DelimiterOrphan({ prefix: 'prefix' }, fakeLogger, 'v1');
|
||||
|
||||
const listingKey = makeV1Key('noprefix');
|
||||
assert.strictEqual(delimiter.filter({ key: listingKey, value: '' }), FILTER_SKIP);
|
||||
|
||||
assert.deepStrictEqual(delimiter.result(), EmptyResult);
|
||||
});
|
||||
|
||||
it('should accept a version and return an empty content', () => {
|
||||
const delimiter = new DelimiterOrphan({ }, fakeLogger, 'v1');
|
||||
|
||||
const masterKey = 'key';
|
||||
|
||||
const versionId1 = 'version1';
|
||||
const versionKey1 = `${masterKey}${VID_SEP}${versionId1}`;
|
||||
const date1 = '1970-01-01T00:00:00.001Z';
|
||||
const value1 = `{"versionId":"${versionId1}","last-modified":"${date1}"}`;
|
||||
|
||||
assert.strictEqual(delimiter.filter({
|
||||
key: makeV1Key(versionKey1),
|
||||
value: value1,
|
||||
}), FILTER_ACCEPT);
|
||||
|
||||
assert.deepStrictEqual(delimiter.result(), EmptyResult);
|
||||
});
|
||||
|
||||
it('should accept an orphan delete marker and return it from the content', () => {
|
||||
const delimiter = new DelimiterOrphan({ }, fakeLogger, 'v1');
|
||||
|
||||
const masterKey = 'key';
|
||||
|
||||
const versionId1 = 'version1';
|
||||
const versionKey1 = `${masterKey}${VID_SEP}${versionId1}`;
|
||||
const date1 = '1970-01-01T00:00:00.001Z';
|
||||
const value1 = `{"versionId":"${versionId1}","last-modified":"${date1}","isDeleteMarker":true}`;
|
||||
|
||||
assert.strictEqual(delimiter.filter({
|
||||
key: makeV1Key(versionKey1),
|
||||
value: value1,
|
||||
}), FILTER_ACCEPT);
|
||||
|
||||
const expectedResult = {
|
||||
Contents: [
|
||||
{
|
||||
key: masterKey,
|
||||
value: value1,
|
||||
},
|
||||
],
|
||||
IsTruncated: false,
|
||||
};
|
||||
|
||||
assert.deepStrictEqual(delimiter.result(), expectedResult);
|
||||
});
|
||||
|
||||
it('should accept two orphan delete markers and return them from the content', () => {
|
||||
const delimiter = new DelimiterOrphan({ }, fakeLogger, 'v1');
|
||||
|
||||
// filter the first orphan delete marker
|
||||
const masterKey1 = 'key1';
|
||||
const versionId1 = 'version1';
|
||||
const versionKey1 = `${masterKey1}${VID_SEP}${versionId1}`;
|
||||
const date1 = '1970-01-01T00:00:00.002Z';
|
||||
const value1 = `{"versionId":"${versionId1}","last-modified":"${date1}","isDeleteMarker":true}`;
|
||||
|
||||
assert.strictEqual(delimiter.filter({
|
||||
key: makeV1Key(versionKey1),
|
||||
value: value1,
|
||||
}), FILTER_ACCEPT);
|
||||
|
||||
// filter the second orphan delete marker
|
||||
const masterKey2 = 'key2';
|
||||
const versionId2 = 'version2';
|
||||
const versionKey2 = `${masterKey2}${VID_SEP}${versionId2}`;
|
||||
const date2 = '1970-01-01T00:00:00.001Z';
|
||||
const value2 = `{"versionId":"${versionId2}","last-modified":"${date2}","isDeleteMarker":true}`;
|
||||
|
||||
assert.strictEqual(delimiter.filter({
|
||||
key: makeV1Key(versionKey2),
|
||||
value: value2,
|
||||
}), FILTER_ACCEPT);
|
||||
|
||||
const expectedResult = {
|
||||
Contents: [
|
||||
{
|
||||
key: masterKey1,
|
||||
value: value1,
|
||||
},
|
||||
{
|
||||
key: masterKey2,
|
||||
value: value2,
|
||||
},
|
||||
],
|
||||
IsTruncated: false,
|
||||
};
|
||||
|
||||
assert.deepStrictEqual(delimiter.result(), expectedResult);
|
||||
});
|
||||
|
||||
it('should accept two orphan delete markers and return truncated content with one', () => {
|
||||
const delimiter = new DelimiterOrphan({ maxKeys: 1 }, fakeLogger, 'v1');
|
||||
|
||||
// filter the first orphan delete marker
|
||||
const masterKey1 = 'key1';
|
||||
const versionId1 = 'version1';
|
||||
const versionKey1 = `${masterKey1}${VID_SEP}${versionId1}`;
|
||||
const date1 = '1970-01-01T00:00:00.002Z';
|
||||
const value1 = `{"versionId":"${versionId1}","last-modified":"${date1}","isDeleteMarker":true}`;
|
||||
|
||||
assert.strictEqual(delimiter.filter({
|
||||
key: makeV1Key(versionKey1),
|
||||
value: value1,
|
||||
}), FILTER_ACCEPT);
|
||||
|
||||
// filter the second orphan delete marker
|
||||
const masterKey2 = 'key2';
|
||||
const versionId2 = 'version2';
|
||||
const versionKey2 = `${masterKey2}${VID_SEP}${versionId2}`;
|
||||
const date2 = '1970-01-01T00:00:00.001Z';
|
||||
const value2 = `{"versionId":"${versionId2}","last-modified":"${date2}","isDeleteMarker":true}`;
|
||||
|
||||
assert.strictEqual(delimiter.filter({
|
||||
key: makeV1Key(versionKey2),
|
||||
value: value2,
|
||||
}), FILTER_ACCEPT);
|
||||
|
||||
const expectedResult = {
|
||||
Contents: [
|
||||
{
|
||||
key: masterKey1,
|
||||
value: value1,
|
||||
},
|
||||
],
|
||||
NextKeyMarker: masterKey1,
|
||||
IsTruncated: true,
|
||||
};
|
||||
|
||||
assert.deepStrictEqual(delimiter.result(), expectedResult);
|
||||
});
|
||||
|
||||
    it('should accept two orphan delete markers and return the one created before the beforeDate', () => {
        const date1 = '1970-01-01T00:00:00.002Z';
        const delimiter = new DelimiterOrphan({ beforeDate: date1 }, fakeLogger, 'v1');

        // filter the first orphan delete marker
        const masterKey1 = 'key1';
        const versionId1 = 'version1';
        const versionKey1 = `${masterKey1}${VID_SEP}${versionId1}`;
        const value1 = `{"versionId":"${versionId1}","last-modified":"${date1}","isDeleteMarker":true}`;

        assert.strictEqual(delimiter.filter({
            key: makeV1Key(versionKey1),
            value: value1,
        }), FILTER_ACCEPT);

        // filter the second orphan delete marker
        const masterKey2 = 'key2';
        const versionId2 = 'version2';
        const versionKey2 = `${masterKey2}${VID_SEP}${versionId2}`;
        const date2 = '1970-01-01T00:00:00.001Z';
        const value2 = `{"versionId":"${versionId2}","last-modified":"${date2}","isDeleteMarker":true}`;

        assert.strictEqual(delimiter.filter({
            key: makeV1Key(versionKey2),
            value: value2,
        }), FILTER_ACCEPT);

        const expectedResult = {
            Contents: [
                {
                    key: masterKey2,
                    value: value2,
                },
            ],
            IsTruncated: false,
        };
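        // Only key2 is listed: its last-modified (...001Z) is strictly older than beforeDate (...002Z),
        // while key1's last-modified equals beforeDate and is therefore excluded
        // (assuming beforeDate is applied as a strict "older than" comparison).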

        assert.deepStrictEqual(delimiter.result(), expectedResult);
    });

    it('should end filtering if max keys reached', () => {
        const delimiter = new DelimiterOrphan({ maxKeys: 1 }, fakeLogger, 'v1');

        // filter the first orphan delete marker
        const masterKey1 = 'key1';
        const versionId1 = 'version1';
        const versionKey1 = `${masterKey1}${VID_SEP}${versionId1}`;
        const date1 = '1970-01-01T00:00:00.002Z';
        const value1 = `{"versionId":"${versionId1}","last-modified":"${date1}","isDeleteMarker":true}`;

        assert.strictEqual(delimiter.filter({
            key: makeV1Key(versionKey1),
            value: value1,
        }), FILTER_ACCEPT);

        // filter the second orphan delete marker
        const masterKey2 = 'key2';
        const versionId2 = 'version2';
        const versionKey2 = `${masterKey2}${VID_SEP}${versionId2}`;
        const date2 = '1970-01-01T00:00:00.001Z';
        const value2 = `{"versionId":"${versionId2}","last-modified":"${date2}","isDeleteMarker":true}`;

        assert.strictEqual(delimiter.filter({
            key: makeV1Key(versionKey2),
            value: value2,
        }), FILTER_ACCEPT);

        // filter the third orphan delete marker
        const masterKey3 = 'key3';
        const versionId3 = 'version3';
        const versionKey3 = `${masterKey3}${VID_SEP}${versionId3}`;
        const date3 = '1970-01-01T00:00:00.000Z';
        const value3 = `{"versionId":"${versionId3}","last-modified":"${date3}","isDeleteMarker":true}`;

        assert.strictEqual(delimiter.filter({
            key: makeV1Key(versionKey3),
            value: value3,
        }), FILTER_END);

        const expectedResult = {
            Contents: [
                {
                    key: masterKey1,
                    value: value1,
                },
            ],
            NextKeyMarker: masterKey1,
            IsTruncated: true,
        };

        assert.deepStrictEqual(delimiter.result(), expectedResult);
    });

    it('should end filtering if delimiter timeout', () => {
        const delimiter = new DelimiterOrphan({ }, fakeLogger, 'v1');

        // filter the first orphan delete marker
        const masterKey1 = 'key1';
        const versionId1 = 'version1';
        const versionKey1 = `${masterKey1}${VID_SEP}${versionId1}`;
        const date1 = '1970-01-01T00:00:00.002Z';
        const value1 = `{"versionId":"${versionId1}","last-modified":"${date1}","isDeleteMarker":true}`;

        assert.strictEqual(delimiter.filter({
            key: makeV1Key(versionKey1),
            value: value1,
        }), FILTER_ACCEPT);

        // force delimiter to timeout.
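        // (Assumption: filter() compares Date.now() - this.start against DELIMITER_TIMEOUT_MS,
        // so backdating `start` past the threshold makes the very next call return FILTER_END.)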
        delimiter.start = Date.now() - (DELIMITER_TIMEOUT_MS + 1);

        // filter the second orphan delete marker
        const masterKey2 = 'key2';
        const versionId2 = 'version2';
        const versionKey2 = `${masterKey2}${VID_SEP}${versionId2}`;
        const date2 = '1970-01-01T00:00:00.001Z';
        const value2 = `{"versionId":"${versionId2}","last-modified":"${date2}","isDeleteMarker":true}`;

        assert.strictEqual(delimiter.filter({
            key: makeV1Key(versionKey2),
            value: value2,
        }), FILTER_END);

        const expectedResult = {
            Contents: [
                {
                    key: masterKey1,
                    value: value1,
                },
            ],
            NextKeyMarker: masterKey1,
            IsTruncated: true,
        };

        assert.deepStrictEqual(delimiter.result(), expectedResult);
    });

    it('should end filtering if delimiter timeout with empty content', () => {
        const delimiter = new DelimiterOrphan({ }, fakeLogger, 'v1');

        // filter the first version (no isDeleteMarker flag, so it is not listed as an orphan)
        const masterKey1 = 'key1';
        const versionId1 = 'version1';
        const versionKey1 = `${masterKey1}${VID_SEP}${versionId1}`;
        const date1 = '1970-01-01T00:00:00.002Z';
        const value1 = `{"versionId":"${versionId1}","last-modified":"${date1}"}`;

        assert.strictEqual(delimiter.filter({
            key: makeV1Key(versionKey1),
            value: value1,
        }), FILTER_ACCEPT);

        // filter the second version (also not a delete marker)
        const masterKey2 = 'key2';
        const versionId2 = 'version2';
        const versionKey2 = `${masterKey2}${VID_SEP}${versionId2}`;
        const date2 = '1970-01-01T00:00:00.001Z';
        const value2 = `{"versionId":"${versionId2}","last-modified":"${date2}"}`;

        assert.strictEqual(delimiter.filter({
            key: makeV1Key(versionKey2),
            value: value2,
        }), FILTER_ACCEPT);

        // force delimiter to timeout.
        delimiter.start = Date.now() - (DELIMITER_TIMEOUT_MS + 1);

        // filter the third orphan delete marker
        const masterKey3 = 'key3';
        const versionId3 = 'version3';
        const versionKey3 = `${masterKey3}${VID_SEP}${versionId3}`;
        const date3 = '1970-01-01T00:00:00.000Z';
        const value3 = `{"versionId":"${versionId3}","last-modified":"${date3}","isDeleteMarker":true}`;

        assert.strictEqual(delimiter.filter({
            key: makeV1Key(versionKey3),
            value: value3,
        }), FILTER_END);

        // no orphan delete marker was seen before the timeout, so Contents is empty and the
        // listing resumes after the last key processed (masterKey2).
        const expectedResult = {
            Contents: [],
            NextKeyMarker: masterKey2,
            IsTruncated: true,
        };

        assert.deepStrictEqual(delimiter.result(), expectedResult);
    });
});