Compare commits

...

3 Commits

Author SHA1 Message Date
Jonathan Gramain 879cd3c57e Merge branches 'bugfix/S3C-2899-vformatV1delimiterMaster', 'bugfix/S3C-2899-vformatV1delimiterVersions' and 'bugfix/S3C-2899-vformatV1MPU' into user/jonathan/S3C-2899-implem-v1 2020-05-20 12:26:06 -07:00
Jonathan Gramain fb89b4e683 bugfix: S3C-2899 support v1 in Delimiter, DelimiterMaster
The two listing algorithms Delimiter and DelimiterMaster now support the v1
versioning key format in addition to v0.

Modify the listing algorithm classes to support buckets in the v1 versioning
key format, in addition to v0.

Enhance existing unit tests to check the result of genMDParams()
2020-05-19 16:45:09 -07:00
Jonathan Gramain 1bda8559bc bugfix: S3C-2899 support vFormat v1 for MPU listing
Support listing MPUs stored with versioning key format v1
2020-05-19 16:44:42 -07:00
7 changed files with 962 additions and 585 deletions
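The recurring pattern in the diffs below: each listing class binds its format-specific methods once, at construction time, keyed by the bucket's versioning key format. A minimal, hypothetical sketch of that dispatch (the class name and the vFormat default are illustrative additions, not part of the diff; the real bindings and method bodies are in the per-class diffs that follow):

const VSConst = require('../../versioning/constants').VersioningConstants;
const { DbPrefixes, BucketVersioningKeyFormat } = VSConst;

class ListingSketch {
    constructor(params, logger, vFormat) {
        // Assumption for this sketch: default to v0 when no key format is given.
        this.vFormat = vFormat || BucketVersioningKeyFormat.v0;
        // Bind the v0 or v1 variant of each method up front, so callers of
        // genMDParams()/getObjectKey() never have to test vFormat again.
        Object.assign(this, {
            [BucketVersioningKeyFormat.v0]: {
                genMDParams: this.genMDParamsV0,
                getObjectKey: this.getObjectKeyV0,
            },
            [BucketVersioningKeyFormat.v1]: {
                genMDParams: this.genMDParamsV1,
                getObjectKey: this.getObjectKeyV1,
            },
        }[this.vFormat]);
    }
    // Stubs only: the real V0/V1 bodies are in the per-class diffs below.
    genMDParamsV0() { return {}; }
    genMDParamsV1() { return {}; }
    getObjectKeyV0(obj) { return obj.key; }
    getObjectKeyV1(obj) { return obj.key.slice(DbPrefixes.Master.length); }
}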


@ -1,10 +1,10 @@
'use strict'; // eslint-disable-line strict 'use strict'; // eslint-disable-line strict
const errors = require('../../errors'); const { inc, checkLimit, listingParamsMasterKeysV0ToV1,
const { inc, checkLimit, FILTER_END, FILTER_ACCEPT } = require('./tools'); FILTER_END, FILTER_ACCEPT } = require('./tools');
const DEFAULT_MAX_KEYS = 1000; const DEFAULT_MAX_KEYS = 1000;
const VSConst = require('../../versioning/constants').VersioningConstants; const VSConst = require('../../versioning/constants').VersioningConstants;
const { BucketVersioningKeyFormat } = VSConst; const { DbPrefixes, BucketVersioningKeyFormat } = VSConst;
function numberDefault(num, defaultNum) { function numberDefault(num, defaultNum) {
const parsedNum = Number.parseInt(num, 10); const parsedNum = Number.parseInt(num, 10);
@ -38,9 +38,20 @@ class MultipartUploads {
this.delimiter = params.delimiter; this.delimiter = params.delimiter;
this.splitter = params.splitter; this.splitter = params.splitter;
this.logger = logger; this.logger = logger;
Object.assign(this, {
[BucketVersioningKeyFormat.v0]: {
genMDParams: this.genMDParamsV0,
getObjectKey: this.getObjectKeyV0,
},
[BucketVersioningKeyFormat.v1]: {
genMDParams: this.genMDParamsV1,
getObjectKey: this.getObjectKeyV1,
},
}[this.vFormat]);
} }
genMDParams() { genMDParamsV0() {
const params = {}; const params = {};
if (this.params.keyMarker) { if (this.params.keyMarker) {
params.gt = `overview${this.params.splitter}` + params.gt = `overview${this.params.splitter}` +
@ -62,6 +73,11 @@ class MultipartUploads {
return params; return params;
} }
genMDParamsV1() {
const v0params = this.genMDParamsV0();
return listingParamsMasterKeysV0ToV1(v0params);
}
/** /**
* This function adds the elements to the Uploads * This function adds the elements to the Uploads
* Set the NextKeyMarker to the current key * Set the NextKeyMarker to the current key
@ -106,11 +122,12 @@ class MultipartUploads {
} }
} }
_getObjectKey(obj) { getObjectKeyV0(obj) {
if (this.vFormat === BucketVersioningKeyFormat.v0) { return obj.key;
return obj.key; }
}
throw errors.NotImplemented; getObjectKeyV1(obj) {
return obj.key.slice(DbPrefixes.Master.length);
} }
/** /**
@ -125,7 +142,7 @@ class MultipartUploads {
this.IsTruncated = this.maxKeys > 0; this.IsTruncated = this.maxKeys > 0;
return FILTER_END; return FILTER_END;
} }
const key = this._getObjectKey(obj); const key = this.getObjectKey(obj);
const value = obj.value; const value = obj.value;
if (this.delimiter) { if (this.delimiter) {
const mpuPrefixSlice = `overview${this.splitter}`.length; const mpuPrefixSlice = `overview${this.splitter}`.length;

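genMDParamsV1() above delegates to listingParamsMasterKeysV0ToV1() from lib/algos/list/tools.js, whose implementation is not part of this compare. Inferring from the v0/v1 parameter pairs asserted in the test files further down, a plausible sketch of the conversion (the Sketch suffix marks it as an illustration, not the library function):

const { inc } = require('./tools');
const { DbPrefixes } =
    require('../../versioning/constants').VersioningConstants;

// Sketch only: prefix every range bound with DbPrefixes.Master so the scan
// stays within master keys, and close the range at inc(DbPrefixes.Master)
// when the v0 params leave the upper bound open.
function listingParamsMasterKeysV0ToV1Sketch(v0params) {
    const v1params = {};
    if (v0params.gt !== undefined) {
        v1params.gt = `${DbPrefixes.Master}${v0params.gt}`;
    } else if (v0params.gte !== undefined) {
        v1params.gte = `${DbPrefixes.Master}${v0params.gte}`;
    } else {
        v1params.gte = DbPrefixes.Master;
    }
    if (v0params.lt !== undefined) {
        v1params.lt = `${DbPrefixes.Master}${v0params.lt}`;
    } else {
        v1params.lt = inc(DbPrefixes.Master);
    }
    return v1params;
}

For example, v0 params { gt: 'zzzz' } map to { gt: `${DbPrefixes.Master}zzzz`, lt: inc(DbPrefixes.Master) }, which matches the 'with bad marker' expectation in the Delimiter tests below.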

@ -1,10 +1,10 @@
'use strict'; // eslint-disable-line strict 'use strict'; // eslint-disable-line strict
const errors = require('../../errors');
const Extension = require('./Extension').default; const Extension = require('./Extension').default;
const { inc, FILTER_END, FILTER_ACCEPT, FILTER_SKIP } = require('./tools'); const { inc, listingParamsMasterKeysV0ToV1,
FILTER_END, FILTER_ACCEPT, FILTER_SKIP } = require('./tools');
const VSConst = require('../../versioning/constants').VersioningConstants; const VSConst = require('../../versioning/constants').VersioningConstants;
const { BucketVersioningKeyFormat } = VSConst; const { DbPrefixes, BucketVersioningKeyFormat } = VSConst;
/** /**
* Find the common prefix in the path * Find the common prefix in the path
@ -92,13 +92,19 @@ class Delimiter extends Extension {
this[this.nextContinueMarker].slice(0, nextDelimiterIndex + this[this.nextContinueMarker].slice(0, nextDelimiterIndex +
this.delimiter.length); this.delimiter.length);
} }
}
genMDParams() { Object.assign(this, {
if (this.vFormat === BucketVersioningKeyFormat.v0) { [BucketVersioningKeyFormat.v0]: {
return this.genMDParamsV0(); genMDParams: this.genMDParamsV0,
} getObjectKey: this.getObjectKeyV0,
throw errors.NotImplemented; skipping: this.skippingV0,
},
[BucketVersioningKeyFormat.v1]: {
genMDParams: this.genMDParamsV1,
getObjectKey: this.getObjectKeyV1,
skipping: this.skippingV1,
},
}[this.vFormat]);
} }
genMDParamsV0() { genMDParamsV0() {
@ -118,6 +124,11 @@ class Delimiter extends Extension {
return params; return params;
} }
genMDParamsV1() {
const params = this.genMDParamsV0();
return listingParamsMasterKeysV0ToV1(params);
}
/** /**
* check if the max keys count has been reached and set the * check if the max keys count has been reached and set the
* final state of the result if it is the case * final state of the result if it is the case
@ -150,11 +161,12 @@ class Delimiter extends Extension {
return FILTER_ACCEPT; return FILTER_ACCEPT;
} }
_getObjectKey(obj) { getObjectKeyV0(obj) {
if (this.vFormat === BucketVersioningKeyFormat.v0) { return obj.key;
return obj.key; }
}
throw errors.NotImplemented; getObjectKeyV1(obj) {
return obj.key.slice(DbPrefixes.Master.length);
} }
/** /**
@ -169,7 +181,7 @@ class Delimiter extends Extension {
* @return {number} - indicates if iteration should continue * @return {number} - indicates if iteration should continue
*/ */
filter(obj) { filter(obj) {
const key = this._getObjectKey(obj); const key = this.getObjectKey(obj);
const value = obj.value; const value = obj.value;
if ((this.prefix && !key.startsWith(this.prefix)) if ((this.prefix && !key.startsWith(this.prefix))
|| (this.alphabeticalOrder || (this.alphabeticalOrder
@ -210,22 +222,27 @@ class Delimiter extends Extension {
} }
/** /**
* If repd happens to want to skip listing, here is an idea. * If repd happens to want to skip listing on a bucket in v0
* versioning key format, here is an idea.
* *
* @return {string} - the present range (NextMarker) if repd believes * @return {string} - the present range (NextMarker) if repd believes
* that it's enough and should move on * that it's enough and should move on
*/ */
skipping() {
if (this.vFormat === BucketVersioningKeyFormat.v0) {
return this.skippingV0();
}
throw errors.NotImplemented;
}
skippingV0() { skippingV0() {
return this[this.nextContinueMarker]; return this[this.nextContinueMarker];
} }
/**
* If repd happens to want to skip listing on a bucket in v1
* versioning key format, here is an idea.
*
* @return {string} - the present range (NextMarker) if repd believes
* that it's enough and should move on
*/
skippingV1() {
return DbPrefixes.Master + this[this.nextContinueMarker];
}
/** /**
* Return an object containing all mandatory fields to use once the * Return an object containing all mandatory fields to use once the
* iteration is done, doesn't show a NextMarker field if the output * iteration is done, doesn't show a NextMarker field if the output

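For reference, the vFormat-parameterized Delimiter tests further down exercise the code above by passing the key format as a third constructor argument. A trimmed version of one of their assertions (require paths follow the pattern of that test file's other requires and are assumptions here; logger setup copied from the test file):

const assert = require('assert');
const { Delimiter } = require('../../../../lib/algos/list/delimiter');
const { inc } = require('../../../../lib/algos/list/tools');
const { DbPrefixes } =
    require('../../../../lib/versioning/constants').VersioningConstants;
const logger = new (require('werelogs').Logger)('listTest');

// 'delimiter and prefix' case, vFormat=v1: the generated metadata listing
// params are bounded to master keys under the requested prefix.
const listing = new Delimiter({ delimiter: '/', prefix: 'notes/' }, logger, 'v1');
assert.deepStrictEqual(listing.genMDParams(), {
    gte: `${DbPrefixes.Master}notes/`,
    lt: `${DbPrefixes.Master}notes${inc('/')}`,
});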

@ -1,6 +1,5 @@
'use strict'; // eslint-disable-line strict 'use strict'; // eslint-disable-line strict
const errors = require('../../errors');
const Delimiter = require('./delimiter').Delimiter; const Delimiter = require('./delimiter').Delimiter;
const Version = require('../../versioning/Version').Version; const Version = require('../../versioning/Version').Version;
const VSConst = require('../../versioning/constants').VersioningConstants; const VSConst = require('../../versioning/constants').VersioningConstants;
@ -8,6 +7,7 @@ const { BucketVersioningKeyFormat } = VSConst;
const { FILTER_ACCEPT, FILTER_SKIP, SKIP_NONE } = require('./tools'); const { FILTER_ACCEPT, FILTER_SKIP, SKIP_NONE } = require('./tools');
const VID_SEP = VSConst.VersionId.Separator; const VID_SEP = VSConst.VersionId.Separator;
const { DbPrefixes } = VSConst;
/** /**
* Handle object listing with parameters. This extends the base class Delimiter * Handle object listing with parameters. This extends the base class Delimiter
@ -32,15 +32,31 @@ class DelimiterMaster extends Delimiter {
// non-PHD master version or a version whose master is a PHD version // non-PHD master version or a version whose master is a PHD version
this.prvKey = undefined; this.prvKey = undefined;
this.prvPHDKey = undefined; this.prvPHDKey = undefined;
Object.assign(this, {
[BucketVersioningKeyFormat.v0]: {
filter: this.filterV0,
skipping: this.skippingV0,
},
[BucketVersioningKeyFormat.v1]: {
filter: this.filterV1,
skipping: this.skippingV1,
},
}[this.vFormat]);
} }
filter(obj) { /**
if (this.vFormat === BucketVersioningKeyFormat.v0) { * Filter to apply on each iteration for buckets in v0 format,
return this.filterV0(obj); * based on:
} * - prefix
throw errors.NotImplemented; * - delimiter
} * - maxKeys
* The marker is being handled directly by levelDB
* @param {Object} obj - The key and value of the element
* @param {String} obj.key - The key of the element
* @param {String} obj.value - The value of the element
* @return {number} - indicates if iteration should continue
*/
filterV0(obj) { filterV0(obj) {
let key = obj.key; let key = obj.key;
const value = obj.value; const value = obj.value;
@ -120,7 +136,26 @@ class DelimiterMaster extends Delimiter {
return this.addContents(key, value); return this.addContents(key, value);
} }
skipping() { /**
* Filter to apply on each iteration for buckets in v1 format,
* based on:
* - prefix
* - delimiter
* - maxKeys
* The marker is being handled directly by levelDB
* @param {Object} obj - The key and value of the element
* @param {String} obj.key - The key of the element
* @param {String} obj.value - The value of the element
* @return {number} - indicates if iteration should continue
*/
filterV1(obj) {
// Filtering master keys in v1 is simply listing the master
// keys, as the state of version keys does not change the
// result, so we can use the Delimiter method directly.
return super.filter(obj);
}
skippingV0() {
if (this[this.nextContinueMarker]) { if (this[this.nextContinueMarker]) {
// next marker or next continuation token: // next marker or next continuation token:
// - foo/ : skipping foo/ // - foo/ : skipping foo/
@ -134,6 +169,14 @@ class DelimiterMaster extends Delimiter {
} }
return SKIP_NONE; return SKIP_NONE;
} }
skippingV1() {
const skipTo = this.skippingV0();
if (skipTo === SKIP_NONE) {
return SKIP_NONE;
}
return DbPrefixes.Master + skipTo;
}
} }
module.exports = { DelimiterMaster }; module.exports = { DelimiterMaster };

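The net effect on DelimiterMaster for a v1 bucket, mirroring the 'good skipping value' test further down: keys arrive with the DbPrefixes.Master prefix, filterV1() defers to the plain Delimiter filter, and skipping() returns a range that keeps the prefix. Same require-path and logger assumptions as the previous example:

const assert = require('assert');
const { DelimiterMaster } =
    require('../../../../lib/algos/list/delimiterMaster');
const { DbPrefixes } =
    require('../../../../lib/versioning/constants').VersioningConstants;
const logger = new (require('werelogs').Logger)('listTest');

const delimiter = new DelimiterMaster({ delimiter: '/' }, logger, 'v1');
// In v1, listed entries are master keys prefixed with DbPrefixes.Master.
delimiter.filter({ key: `${DbPrefixes.Master}foo/0001`, value: '{}' });
// skipping() keeps the prefix so the caller can skip the whole range.
assert.strictEqual(delimiter.skipping(), `${DbPrefixes.Master}foo/`);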

@ -7,80 +7,86 @@ const werelogs = require('werelogs').Logger;
// eslint-disable-next-line new-cap // eslint-disable-next-line new-cap
const logger = new werelogs('listMpuTest'); const logger = new werelogs('listMpuTest');
const performListing = require('../../../utils/performListing'); const performListing = require('../../../utils/performListing');
const VSConst = require('../../../../lib/versioning/constants').VersioningConstants;
const { DbPrefixes } = VSConst;
describe('Multipart Uploads listing algorithm', () => { describe('Multipart Uploads listing algorithm', () => {
const splitter = '**'; const splitter = '**';
const overviewPrefix = `overview${splitter}`; const overviewPrefix = `overview${splitter}`;
const storageClass = 'STANDARD'; const storageClass = 'STANDARD';
const initiator1 = { ID: '1', DisplayName: 'initiator1' }; const initiator1 = { ID: '1', DisplayName: 'initiator1' };
const initiator2 = { ID: '2', DisplayName: 'initiator2' }; const initiator2 = { ID: '2', DisplayName: 'initiator2' };
const keys = [ const keys = {
{ v0: [`${overviewPrefix}test/1${splitter}uploadId1`,
key: `${overviewPrefix}test/1${splitter}uploadId1`, `${overviewPrefix}test/2${splitter}uploadId2`,
value: JSON.stringify({ `${overviewPrefix}test/3${splitter}uploadId3`,
'key': 'test/1', `${overviewPrefix}testMore/4${splitter}uploadId4`,
'uploadId': 'uploadId1', `${overviewPrefix}testMore/5${splitter}uploadId5`,
'initiator': initiator1, `${overviewPrefix}prefixTest/5${splitter}uploadId5`,
'owner-id': '1', ],
'owner-display-name': 'owner1', v1: [`${DbPrefixes.Master}${overviewPrefix}test/1${splitter}uploadId1`,
'x-amz-storage-class': storageClass, `${DbPrefixes.Master}${overviewPrefix}test/2${splitter}uploadId2`,
'initiated': '', `${DbPrefixes.Master}${overviewPrefix}test/3${splitter}uploadId3`,
}), `${DbPrefixes.Master}${overviewPrefix}testMore/4${splitter}uploadId4`,
}, { `${DbPrefixes.Master}${overviewPrefix}testMore/5${splitter}uploadId5`,
key: `${overviewPrefix}test/2${splitter}uploadId2`, `${DbPrefixes.Master}${overviewPrefix}prefixTest/5${splitter}uploadId5`,
value: JSON.stringify({ ],
'key': 'test/2', };
'uploadId': 'uploadId2', const values = [
'initiator': initiator2, JSON.stringify({
'owner-id': '1', 'key': 'test/1',
'owner-display-name': 'owner2', 'uploadId': 'uploadId1',
'x-amz-storage-class': storageClass, 'initiator': initiator1,
'initiated': '', 'owner-id': '1',
}), 'owner-display-name': 'owner1',
}, { 'x-amz-storage-class': storageClass,
key: `${overviewPrefix}test/3${splitter}uploadId3`, 'initiated': '',
value: JSON.stringify({ }),
'key': 'test/3', JSON.stringify({
'uploadId': 'uploadId3', 'key': 'test/2',
'initiator': initiator1, 'uploadId': 'uploadId2',
'owner-id': '1', 'initiator': initiator2,
'owner-display-name': 'owner1', 'owner-id': '1',
'x-amz-storage-class': storageClass, 'owner-display-name': 'owner2',
'initiated': '', 'x-amz-storage-class': storageClass,
}), 'initiated': '',
}, { }),
key: `${overviewPrefix}testMore/4${splitter}uploadId4`, JSON.stringify({
value: JSON.stringify({ 'key': 'test/3',
'key': 'testMore/4', 'uploadId': 'uploadId3',
'uploadId': 'uploadId4', 'initiator': initiator1,
'initiator': initiator2, 'owner-id': '1',
'owner-id': '1', 'owner-display-name': 'owner1',
'owner-display-name': 'owner2', 'x-amz-storage-class': storageClass,
'x-amz-storage-class': storageClass, 'initiated': '',
'initiated': '', }),
}), JSON.stringify({
}, { 'key': 'testMore/4',
key: `${overviewPrefix}testMore/5${splitter}uploadId5`, 'uploadId': 'uploadId4',
value: JSON.stringify({ 'initiator': initiator2,
'key': 'testMore/5', 'owner-id': '1',
'uploadId': 'uploadId5', 'owner-display-name': 'owner2',
'initiator': initiator1, 'x-amz-storage-class': storageClass,
'owner-id': '1', 'initiated': '',
'owner-display-name': 'owner1', }),
'x-amz-storage-class': storageClass, JSON.stringify({
'initiated': '', 'key': 'testMore/5',
}), 'uploadId': 'uploadId5',
}, { 'initiator': initiator1,
key: `${overviewPrefix}prefixTest/5${splitter}uploadId5`, 'owner-id': '1',
value: JSON.stringify({ 'owner-display-name': 'owner1',
'key': 'prefixTest/5', 'x-amz-storage-class': storageClass,
'uploadId': 'uploadId5', 'initiated': '',
'initiator': initiator1, }),
'owner-id': '1', JSON.stringify({
'owner-display-name': 'owner1', 'key': 'prefixTest/5',
'x-amz-storage-class': storageClass, 'uploadId': 'uploadId5',
'initiated': '', 'initiator': initiator1,
}), 'owner-id': '1',
}, 'owner-display-name': 'owner1',
'x-amz-storage-class': storageClass,
'initiated': '',
}),
]; ];
let listingParams; let listingParams;
let expectedResult; let expectedResult;
@ -103,8 +109,8 @@ describe('Multipart Uploads listing algorithm', () => {
NextUploadIdMarker: 'uploadId5', NextUploadIdMarker: 'uploadId5',
}; };
expectedResult.Uploads = keys.map(obj => { expectedResult.Uploads = values.map(value => {
const tmp = JSON.parse(obj.value); const tmp = JSON.parse(value);
return { return {
key: tmp.key, key: tmp.key,
value: { value: {
@ -122,44 +128,47 @@ describe('Multipart Uploads listing algorithm', () => {
done(); done();
}); });
it('should perform a listing of all keys', done => { ['v0', 'v1'].forEach(vFormat => {
const listingResult = performListing(keys, MultipartUploads, const dbListing = keys[vFormat].map((key, i) => ({
listingParams, logger); key,
assert.deepStrictEqual(listingResult, expectedResult); value: values[i],
done(); }));
}); it(`should perform a vFormat=${vFormat} listing of all keys`, () => {
const listingResult = performListing(dbListing, MultipartUploads,
listingParams, logger, vFormat);
assert.deepStrictEqual(listingResult, expectedResult);
});
it('should perform a listing with delimiter', done => { it(`should perform a vFormat=${vFormat} listing with delimiter`, () => {
const delimiter = '/'; const delimiter = '/';
listingParams.delimiter = delimiter; listingParams.delimiter = delimiter;
// format result // format result
expectedResult.Uploads = []; expectedResult.Uploads = [];
expectedResult.CommonPrefixes = ['test/', 'testMore/', 'prefixTest/']; expectedResult.CommonPrefixes = ['test/', 'testMore/', 'prefixTest/'];
expectedResult.Delimiter = delimiter; expectedResult.Delimiter = delimiter;
expectedResult.MaxKeys = 1000; expectedResult.MaxKeys = 1000;
expectedResult.NextKeyMarker = 'prefixTest/'; expectedResult.NextKeyMarker = 'prefixTest/';
expectedResult.NextUploadIdMarker = ''; expectedResult.NextUploadIdMarker = '';
const listingResult = performListing(keys, MultipartUploads, const listingResult = performListing(dbListing, MultipartUploads,
listingParams, logger); listingParams, logger, vFormat);
assert.deepStrictEqual(listingResult, expectedResult); assert.deepStrictEqual(listingResult, expectedResult);
done(); });
});
it('should perform a listing with max keys', done => { it(`should perform a vFormat=${vFormat} listing with max keys`, () => {
listingParams.maxKeys = 3; listingParams.maxKeys = 3;
// format result // format result
expectedResult.Uploads.pop(); expectedResult.Uploads.pop();
expectedResult.Uploads.pop(); expectedResult.Uploads.pop();
expectedResult.Uploads.pop(); expectedResult.Uploads.pop();
expectedResult.NextKeyMarker = 'test/3'; expectedResult.NextKeyMarker = 'test/3';
expectedResult.NextUploadIdMarker = 'uploadId3'; expectedResult.NextUploadIdMarker = 'uploadId3';
expectedResult.IsTruncated = true; expectedResult.IsTruncated = true;
expectedResult.MaxKeys = 3; expectedResult.MaxKeys = 3;
const listingResult = performListing(keys, MultipartUploads, const listingResult = performListing(dbListing, MultipartUploads,
listingParams, logger); listingParams, logger, vFormat);
assert.deepStrictEqual(listingResult, expectedResult); assert.deepStrictEqual(listingResult, expectedResult);
done(); });
}); });
}); });


@ -9,11 +9,15 @@ const Werelogs = require('werelogs').Logger;
const logger = new Werelogs('listTest'); const logger = new Werelogs('listTest');
const performListing = require('../../../utils/performListing'); const performListing = require('../../../utils/performListing');
const zpad = require('../../helpers').zpad; const zpad = require('../../helpers').zpad;
const { inc } = require('../../../../lib/algos/list/tools');
const VSConst = require('../../../../lib/versioning/constants').VersioningConstants;
const { DbPrefixes } = VSConst;
class Test { class Test {
constructor(name, input, output, filter) { constructor(name, input, genMDParams, output, filter) {
this.name = name; this.name = name;
this.input = input; this.input = input;
this.genMDParams = genMDParams;
this.output = output; this.output = output;
this.filter = filter || this._defaultFilter; this.filter = filter || this._defaultFilter;
} }
@ -38,6 +42,7 @@ const data = [
{ key: 'notes/yore.rs', value }, { key: 'notes/yore.rs', value },
{ key: 'notes/zaphod/Beeblebrox.txt', value }, { key: 'notes/zaphod/Beeblebrox.txt', value },
]; ];
const dataVersioned = [ const dataVersioned = [
{ key: 'Pâtisserie=中文-español-English', value }, { key: 'Pâtisserie=中文-español-English', value },
{ key: 'Pâtisserie=中文-español-English\0bar', value }, { key: 'Pâtisserie=中文-español-English\0bar', value },
@ -85,6 +90,12 @@ const receivedNonAlphaData = nonAlphabeticalData.map(
const tests = [ const tests = [
new Test('all elements', {}, { new Test('all elements', {}, {
v0: {},
v1: {
gte: DbPrefixes.Master,
lt: inc(DbPrefixes.Master),
},
}, {
Contents: receivedData, Contents: receivedData,
CommonPrefixes: [], CommonPrefixes: [],
Delimiter: undefined, Delimiter: undefined,
@ -93,6 +104,14 @@ const tests = [
}), }),
new Test('with valid marker', { new Test('with valid marker', {
marker: receivedData[4].key, marker: receivedData[4].key,
}, {
v0: {
gt: receivedData[4].key,
},
v1: {
gt: `${DbPrefixes.Master}${receivedData[4].key}`,
lt: inc(DbPrefixes.Master),
},
}, { }, {
Contents: [ Contents: [
receivedData[5], receivedData[5],
@ -109,6 +128,14 @@ const tests = [
new Test('with bad marker', { new Test('with bad marker', {
marker: 'zzzz', marker: 'zzzz',
delimiter: '/', delimiter: '/',
}, {
v0: {
gt: 'zzzz',
},
v1: {
gt: `${DbPrefixes.Master}zzzz`,
lt: inc(DbPrefixes.Master),
},
}, { }, {
Contents: [], Contents: [],
CommonPrefixes: [], CommonPrefixes: [],
@ -118,6 +145,12 @@ const tests = [
}, (e, input) => e.key > input.marker), }, (e, input) => e.key > input.marker),
new Test('with makKeys', { new Test('with makKeys', {
maxKeys: 3, maxKeys: 3,
}, {
v0: {},
v1: {
gte: DbPrefixes.Master,
lt: inc(DbPrefixes.Master),
},
}, { }, {
Contents: receivedData.slice(0, 3), Contents: receivedData.slice(0, 3),
CommonPrefixes: [], CommonPrefixes: [],
@ -127,6 +160,12 @@ const tests = [
}), }),
new Test('with big makKeys', { new Test('with big makKeys', {
maxKeys: 15000, maxKeys: 15000,
}, {
v0: {},
v1: {
gte: DbPrefixes.Master,
lt: inc(DbPrefixes.Master),
},
}, { }, {
Contents: receivedData, Contents: receivedData,
CommonPrefixes: [], CommonPrefixes: [],
@ -136,6 +175,12 @@ const tests = [
}), }),
new Test('with delimiter', { new Test('with delimiter', {
delimiter: '/', delimiter: '/',
}, {
v0: {},
v1: {
gte: DbPrefixes.Master,
lt: inc(DbPrefixes.Master),
},
}, { }, {
Contents: [ Contents: [
receivedData[0], receivedData[0],
@ -147,6 +192,12 @@ const tests = [
}), }),
new Test('with long delimiter', { new Test('with long delimiter', {
delimiter: 'notes/summer', delimiter: 'notes/summer',
}, {
v0: {},
v1: {
gte: DbPrefixes.Master,
lt: inc(DbPrefixes.Master),
},
}, { }, {
Contents: [ Contents: [
receivedData[0], receivedData[0],
@ -166,6 +217,15 @@ const tests = [
delimiter: '/', delimiter: '/',
prefix: 'notes/summer/', prefix: 'notes/summer/',
marker: 'notes/summer0', marker: 'notes/summer0',
}, {
v0: {
gt: `notes/summer${inc('/')}`,
lt: `notes/summer${inc('/')}`,
},
v1: {
gt: `${DbPrefixes.Master}notes/summer${inc('/')}`,
lt: `${DbPrefixes.Master}notes/summer${inc('/')}`,
},
}, { }, {
Contents: [], Contents: [],
CommonPrefixes: [], CommonPrefixes: [],
@ -176,6 +236,15 @@ const tests = [
new Test('delimiter and prefix (related to #147)', { new Test('delimiter and prefix (related to #147)', {
delimiter: '/', delimiter: '/',
prefix: 'notes/', prefix: 'notes/',
}, {
v0: {
gte: 'notes/',
lt: `notes${inc('/')}`,
},
v1: {
gte: `${DbPrefixes.Master}notes/`,
lt: `${DbPrefixes.Master}notes${inc('/')}`,
},
}, { }, {
Contents: [ Contents: [
receivedData[7], receivedData[7],
@ -194,6 +263,15 @@ const tests = [
delimiter: '/', delimiter: '/',
prefix: 'notes/', prefix: 'notes/',
marker: 'notes/year.txt', marker: 'notes/year.txt',
}, {
v0: {
gt: 'notes/year.txt',
lt: `notes${inc('/')}`,
},
v1: {
gt: `${DbPrefixes.Master}notes/year.txt`,
lt: `${DbPrefixes.Master}notes${inc('/')}`,
},
}, { }, {
Contents: [ Contents: [
receivedData[8], receivedData[8],
@ -210,6 +288,15 @@ const tests = [
prefix: 'notes/', prefix: 'notes/',
marker: 'notes/', marker: 'notes/',
maxKeys: 1, maxKeys: 1,
}, {
v0: {
gt: 'notes/',
lt: `notes${inc('/')}`,
},
v1: {
gt: `${DbPrefixes.Master}notes/`,
lt: `${DbPrefixes.Master}notes${inc('/')}`,
},
}, { }, {
Contents: [], Contents: [],
CommonPrefixes: ['notes/spring/'], CommonPrefixes: ['notes/spring/'],
@ -223,6 +310,15 @@ const tests = [
prefix: 'notes/', // prefix prefix: 'notes/', // prefix
marker: 'notes/spring/', marker: 'notes/spring/',
maxKeys: 1, maxKeys: 1,
}, {
v0: {
gt: 'notes/spring/',
lt: `notes${inc('/')}`,
},
v1: {
gt: `${DbPrefixes.Master}notes/spring/`,
lt: `${DbPrefixes.Master}notes${inc('/')}`,
},
}, { }, {
Contents: [], Contents: [],
CommonPrefixes: ['notes/summer/'], CommonPrefixes: ['notes/summer/'],
@ -236,6 +332,15 @@ const tests = [
prefix: 'notes/', // prefix prefix: 'notes/', // prefix
marker: 'notes/summer/', marker: 'notes/summer/',
maxKeys: 1, maxKeys: 1,
}, {
v0: {
gt: 'notes/summer/',
lt: `notes${inc('/')}`,
},
v1: {
gt: `${DbPrefixes.Master}notes/summer/`,
lt: `${DbPrefixes.Master}notes${inc('/')}`,
},
}, { }, {
Contents: [ Contents: [
receivedData[7], receivedData[7],
@ -251,6 +356,15 @@ const tests = [
prefix: 'notes/', // prefix prefix: 'notes/', // prefix
marker: 'notes/year.txt', marker: 'notes/year.txt',
maxKeys: 1, maxKeys: 1,
}, {
v0: {
gt: 'notes/year.txt',
lt: `notes${inc('/')}`,
},
v1: {
gt: `${DbPrefixes.Master}notes/year.txt`,
lt: `${DbPrefixes.Master}notes${inc('/')}`,
},
}, { }, {
Contents: [ Contents: [
receivedData[8], receivedData[8],
@ -266,6 +380,15 @@ const tests = [
prefix: 'notes/', prefix: 'notes/',
marker: 'notes/yore.rs', marker: 'notes/yore.rs',
maxKeys: 1, maxKeys: 1,
}, {
v0: {
gt: 'notes/yore.rs',
lt: `notes${inc('/')}`,
},
v1: {
gt: `${DbPrefixes.Master}notes/yore.rs`,
lt: `${DbPrefixes.Master}notes${inc('/')}`,
},
}, { }, {
Contents: [], Contents: [],
CommonPrefixes: ['notes/zaphod/'], CommonPrefixes: ['notes/zaphod/'],
@ -276,6 +399,12 @@ const tests = [
new Test('all elements v2', { new Test('all elements v2', {
v2: true, v2: true,
}, {
v0: {},
v1: {
gte: DbPrefixes.Master,
lt: inc(DbPrefixes.Master),
},
}, { }, {
Contents: receivedData, Contents: receivedData,
CommonPrefixes: [], CommonPrefixes: [],
@ -286,6 +415,14 @@ const tests = [
new Test('with valid startAfter', { new Test('with valid startAfter', {
startAfter: receivedData[4].key, startAfter: receivedData[4].key,
v2: true, v2: true,
}, {
v0: {
gt: receivedData[4].key,
},
v1: {
gt: `${DbPrefixes.Master}${receivedData[4].key}`,
lt: inc(DbPrefixes.Master),
},
}, { }, {
Contents: [ Contents: [
receivedData[5], receivedData[5],
@ -303,6 +440,14 @@ const tests = [
startAfter: 'zzzz', startAfter: 'zzzz',
delimiter: '/', delimiter: '/',
v2: true, v2: true,
}, {
v0: {
gt: 'zzzz',
},
v1: {
gt: `${DbPrefixes.Master}zzzz`,
lt: inc(DbPrefixes.Master),
},
}, { }, {
Contents: [], Contents: [],
CommonPrefixes: [], CommonPrefixes: [],
@ -313,6 +458,14 @@ const tests = [
new Test('with valid continuationToken', { new Test('with valid continuationToken', {
continuationToken: receivedData[4].key, continuationToken: receivedData[4].key,
v2: true, v2: true,
}, {
v0: {
gt: receivedData[4].key,
},
v1: {
gt: `${DbPrefixes.Master}${receivedData[4].key}`,
lt: inc(DbPrefixes.Master),
},
}, { }, {
Contents: [ Contents: [
receivedData[5], receivedData[5],
@ -330,6 +483,14 @@ const tests = [
continuationToken: 'zzzz', continuationToken: 'zzzz',
delimiter: '/', delimiter: '/',
v2: true, v2: true,
}, {
v0: {
gt: 'zzzz',
},
v1: {
gt: `${DbPrefixes.Master}zzzz`,
lt: inc(DbPrefixes.Master),
},
}, { }, {
Contents: [], Contents: [],
CommonPrefixes: [], CommonPrefixes: [],
@ -341,6 +502,15 @@ const tests = [
delimiter: '/', delimiter: '/',
prefix: 'notes/summer/', prefix: 'notes/summer/',
startAfter: 'notes/summer0', startAfter: 'notes/summer0',
}, {
v0: {
gte: 'notes/summer/',
lt: `notes/summer${inc('/')}`,
},
v1: {
gte: `${DbPrefixes.Master}notes/summer/`,
lt: `${DbPrefixes.Master}notes/summer${inc('/')}`,
},
}, { }, {
Contents: [], Contents: [],
CommonPrefixes: [], CommonPrefixes: [],
@ -352,6 +522,15 @@ const tests = [
delimiter: '/', delimiter: '/',
prefix: 'notes/summer/', prefix: 'notes/summer/',
continuationToken: 'notes/summer0', continuationToken: 'notes/summer0',
}, {
v0: {
gte: 'notes/summer/',
lt: `notes/summer${inc('/')}`,
},
v1: {
gte: `${DbPrefixes.Master}notes/summer/`,
lt: `${DbPrefixes.Master}notes/summer${inc('/')}`,
},
}, { }, {
Contents: [], Contents: [],
CommonPrefixes: [], CommonPrefixes: [],
@ -364,6 +543,14 @@ const tests = [
startAfter: 'notes/year.txt', startAfter: 'notes/year.txt',
maxKeys: 1, maxKeys: 1,
v2: true, v2: true,
}, {
v0: {
gt: 'notes/year.txt',
},
v1: {
gt: `${DbPrefixes.Master}notes/year.txt`,
lt: inc(DbPrefixes.Master),
},
}, { }, {
Contents: [ Contents: [
receivedData[8], receivedData[8],
@ -380,6 +567,15 @@ const tests = [
startAfter: 'notes/', startAfter: 'notes/',
maxKeys: 1, maxKeys: 1,
v2: true, v2: true,
}, {
v0: {
gt: 'notes/',
lt: `notes${inc('/')}`,
},
v1: {
gt: `${DbPrefixes.Master}notes/`,
lt: `${DbPrefixes.Master}notes${inc('/')}`,
},
}, { }, {
Contents: [], Contents: [],
CommonPrefixes: ['notes/spring/'], CommonPrefixes: ['notes/spring/'],
@ -394,6 +590,15 @@ const tests = [
continuationToken: 'notes/spring/', continuationToken: 'notes/spring/',
maxKeys: 1, maxKeys: 1,
v2: true, v2: true,
}, {
v0: {
gt: 'notes/spring/',
lt: `notes${inc('/')}`,
},
v1: {
gt: `${DbPrefixes.Master}notes/spring/`,
lt: `${DbPrefixes.Master}notes${inc('/')}`,
},
}, { }, {
Contents: [], Contents: [],
CommonPrefixes: ['notes/summer/'], CommonPrefixes: ['notes/summer/'],
@ -408,6 +613,15 @@ const tests = [
continuationToken: 'notes/summer/', continuationToken: 'notes/summer/',
maxKeys: 1, maxKeys: 1,
v2: true, v2: true,
}, {
v0: {
gt: 'notes/summer/',
lt: `notes${inc('/')}`,
},
v1: {
gt: `${DbPrefixes.Master}notes/summer/`,
lt: `${DbPrefixes.Master}notes${inc('/')}`,
},
}, { }, {
Contents: [ Contents: [
receivedData[7], receivedData[7],
@ -424,6 +638,15 @@ const tests = [
startAfter: 'notes/year.txt', startAfter: 'notes/year.txt',
maxKeys: 1, maxKeys: 1,
v2: true, v2: true,
}, {
v0: {
gt: 'notes/year.txt',
lt: `notes${inc('/')}`,
},
v1: {
gt: `${DbPrefixes.Master}notes/year.txt`,
lt: `${DbPrefixes.Master}notes${inc('/')}`,
},
}, { }, {
Contents: [ Contents: [
receivedData[8], receivedData[8],
@ -440,6 +663,15 @@ const tests = [
startAfter: 'notes/yore.rs', startAfter: 'notes/yore.rs',
maxKeys: 1, maxKeys: 1,
v2: true, v2: true,
}, {
v0: {
gt: 'notes/yore.rs',
lt: `notes${inc('/')}`,
},
v1: {
gt: `${DbPrefixes.Master}notes/yore.rs`,
lt: `${DbPrefixes.Master}notes${inc('/')}`,
},
}, { }, {
Contents: [], Contents: [],
CommonPrefixes: ['notes/zaphod/'], CommonPrefixes: ['notes/zaphod/'],
@ -472,80 +704,107 @@ const alphabeticalOrderTests = [
}, },
]; ];
function getTestListing(test, data, vFormat) {
return data
.filter(e => test.filter(e, test.input))
.map(obj => {
if (vFormat === 'v0') {
return obj;
}
if (vFormat === 'v1') {
return {
key: `${DbPrefixes.Master}${obj.key}`,
value: obj.value,
};
}
return assert.fail(`bad format ${vFormat}`);
});
}
describe('Delimiter listing algorithm', () => { ['v0', 'v1'].forEach(vFormat => {
it('Should return good skipping value for DelimiterMaster', done => { describe(`vFormat=${vFormat} Delimiter listing algorithm`, () => {
const delimiter = new DelimiterMaster({ delimiter: '/' }); it('Should return good skipping value for DelimiterMaster', () => {
for (let i = 0; i < 100; i++) { const delimiter = new DelimiterMaster({ delimiter: '/' });
delimiter.filter({ key: `foo/${zpad(i)}`, value: '{}' }); for (let i = 0; i < 100; i++) {
delimiter.filter({
key: `${vFormat === 'v1' ? DbPrefixes.Master : ''}foo/${zpad(i)}`,
value: '{}',
});
}
assert.strictEqual(delimiter.skipping(),
`${vFormat === 'v1' ? DbPrefixes.Master : ''}foo/`);
});
it('Should set Delimiter alphabeticalOrder field to the expected value', () => {
alphabeticalOrderTests.forEach(test => {
const delimiter = new Delimiter(test.params);
assert.strictEqual(delimiter.alphabeticalOrder,
test.expectedValue,
`${JSON.stringify(test.params)}`);
});
});
tests.forEach(test => {
it(`Should return metadata listing params to list ${test.name}`, () => {
const listing = new Delimiter(test.input, logger, vFormat);
const params = listing.genMDParams();
assert.deepStrictEqual(params, test.genMDParams[vFormat]);
});
it(`Should list ${test.name}`, () => {
// Simulate skip scan done by LevelDB
const d = getTestListing(test, data, vFormat);
const res = performListing(d, Delimiter, test.input, logger, vFormat);
assert.deepStrictEqual(res, test.output);
});
});
// Only v0 gets a listing of master and version keys together.
if (vFormat === 'v0') {
tests.forEach(test => {
it(`Should list master versions ${test.name}`, () => {
// Simulate skip scan done by LevelDB
const d = dataVersioned.filter(e => test.filter(e, test.input));
const res = performListing(d, DelimiterMaster, test.input, logger, vFormat);
assert.deepStrictEqual(res, test.output);
});
});
} }
assert.strictEqual(delimiter.skipping(), 'foo/');
done();
});
it('Should set Delimiter alphabeticalOrder field to the expected value', it('Should filter values according to alphabeticalOrder parameter', () => {
() => { let test = new Test('alphabeticalOrder parameter set', {
alphabeticalOrderTests.forEach(test => { delimiter: '/',
const delimiter = new Delimiter(test.params); alphabeticalOrder: true,
assert.strictEqual(delimiter.alphabeticalOrder, }, {
test.expectedValue, }, {
`${JSON.stringify(test.params)}`); Contents: [
}); receivedNonAlphaData[0],
}); ],
Delimiter: '/',
tests.forEach(test => { CommonPrefixes: [],
it(`Should list ${test.name}`, done => { IsTruncated: false,
// Simulate skip scan done by LevelDB NextMarker: undefined,
const d = data.filter(e => test.filter(e, test.input)); });
const res = performListing(d, Delimiter, test.input, logger); let d = getTestListing(test, nonAlphabeticalData, vFormat);
let res = performListing(d, Delimiter, test.input, logger, vFormat);
assert.deepStrictEqual(res, test.output);
test = new Test('alphabeticalOrder parameter set', {
delimiter: '/',
alphabeticalOrder: false,
}, {
}, {
Contents: [
receivedNonAlphaData[0],
receivedNonAlphaData[1],
],
Delimiter: '/',
CommonPrefixes: [],
IsTruncated: false,
NextMarker: undefined,
});
d = getTestListing(test, nonAlphabeticalData, vFormat);
res = performListing(d, Delimiter, test.input, logger, vFormat);
assert.deepStrictEqual(res, test.output); assert.deepStrictEqual(res, test.output);
done();
}); });
}); });
tests.forEach(test => {
it(`Should list master versions ${test.name}`, done => {
// Simulate skip scan done by LevelDB
const d = dataVersioned.filter(e => test.filter(e, test.input));
const res = performListing(d, DelimiterMaster, test.input, logger);
assert.deepStrictEqual(res, test.output);
done();
});
});
it('Should filter values according to alphabeticalOrder parameter',
() => {
let test = new Test('alphabeticalOrder parameter set', {
delimiter: '/',
alphabeticalOrder: true,
}, {
Contents: [
receivedNonAlphaData[0],
],
Delimiter: '/',
CommonPrefixes: [],
IsTruncated: false,
NextMarker: undefined,
});
let d = nonAlphabeticalData.filter(e => test.filter(e, test.input));
let res = performListing(d, Delimiter, test.input, logger);
assert.deepStrictEqual(res, test.output);
test = new Test('alphabeticalOrder parameter set', {
delimiter: '/',
alphabeticalOrder: false,
}, {
Contents: [
receivedNonAlphaData[0],
receivedNonAlphaData[1],
],
Delimiter: '/',
CommonPrefixes: [],
IsTruncated: false,
NextMarker: undefined,
});
d = nonAlphabeticalData.filter(e => test.filter(e, test.input));
res = performListing(d, Delimiter, test.input, logger);
assert.deepStrictEqual(res, test.output);
});
}); });


@ -13,6 +13,7 @@ const VSConst =
require('../../../../lib/versioning/constants').VersioningConstants; require('../../../../lib/versioning/constants').VersioningConstants;
const Version = require('../../../../lib/versioning/Version').Version; const Version = require('../../../../lib/versioning/Version').Version;
const { generateVersionId } = require('../../../../lib/versioning/VersionID'); const { generateVersionId } = require('../../../../lib/versioning/VersionID');
const { DbPrefixes } = VSConst;
const VID_SEP = VSConst.VersionId.Separator; const VID_SEP = VSConst.VersionId.Separator;
@ -33,395 +34,426 @@ const fakeLogger = {
fatal: () => {}, fatal: () => {},
}; };
describe('Delimiter All masters listing algorithm', () => { function getListingKey(key, vFormat) {
it('should return SKIP_NONE for DelimiterMaster when both NextMarker ' + if (vFormat === 'v0') {
'and NextContinuationToken are undefined', () => { return key;
const delimiter = new DelimiterMaster({ delimiter: '/' }, fakeLogger); }
if (vFormat === 'v1') {
return `${DbPrefixes.Master}${key}`;
}
return assert.fail(`bad vFormat ${vFormat}`);
}
assert.strictEqual(delimiter.NextMarker, undefined); ['v0', 'v1'].forEach(vFormat => {
describe(`Delimiter All masters listing algorithm vFormat=${vFormat}`, () => {
it('should return SKIP_NONE for DelimiterMaster when both NextMarker ' +
'and NextContinuationToken are undefined', () => {
const delimiter = new DelimiterMaster({ delimiter: '/' }, fakeLogger, vFormat);
// When there is no NextMarker or NextContinuationToken, it should assert.strictEqual(delimiter.NextMarker, undefined);
// return SKIP_NONE
assert.strictEqual(delimiter.skipping(), SKIP_NONE);
});
it('should return <key><VersionIdSeparator> for DelimiterMaster when ' + // When there is no NextMarker or NextContinuationToken, it should
'NextMarker is set and there is a delimiter', () => { // return SKIP_NONE
const key = 'key'; assert.strictEqual(delimiter.skipping(), SKIP_NONE);
const delimiter = new DelimiterMaster({ delimiter: '/', marker: key },
fakeLogger);
/* Filter a master version to set NextMarker. */
// TODO: useless once S3C-1628 is fixed.
delimiter.filter({ key, value: '' });
assert.strictEqual(delimiter.NextMarker, key);
/* With a delimiter skipping should return previous key + VID_SEP
* (except when a delimiter is set and the NextMarker ends with the
* delimiter) . */
assert.strictEqual(delimiter.skipping(), key + VID_SEP);
});
it('should return <key><VersionIdSeparator> for DelimiterMaster when ' +
'NextContinuationToken is set and there is a delimiter', () => {
const key = 'key';
const delimiter = new DelimiterMaster(
{ delimiter: '/', startAfter: key, v2: true },
fakeLogger);
// Filter a master version to set NextContinuationToken
delimiter.filter({ key, value: '' });
assert.strictEqual(delimiter.NextContinuationToken, key);
assert.strictEqual(delimiter.skipping(), key + VID_SEP);
});
it('should return NextMarker for DelimiterMaster when NextMarker is set' +
', there is a delimiter and the key ends with the delimiter', () => {
const delimiterChar = '/';
const keyWithEndingDelimiter = `key${delimiterChar}`;
const delimiter = new DelimiterMaster({
delimiter: delimiterChar,
marker: keyWithEndingDelimiter,
}, fakeLogger);
/* When a delimiter is set and the NextMarker ends with the
* delimiter it should return the next marker value. */
assert.strictEqual(delimiter.NextMarker, keyWithEndingDelimiter);
assert.strictEqual(delimiter.skipping(), keyWithEndingDelimiter);
});
it('should skip entries not starting with prefix', () => {
const delimiter = new DelimiterMaster({ prefix: 'prefix' }, fakeLogger);
assert.strictEqual(delimiter.filter({ key: 'wrong' }), FILTER_SKIP);
assert.strictEqual(delimiter.NextMarker, undefined);
assert.strictEqual(delimiter.prvKey, undefined);
assert.deepStrictEqual(delimiter.result(), EmptyResult);
});
it('should skip entries inferior to next marker', () => {
const delimiter = new DelimiterMaster({ marker: 'b' }, fakeLogger);
assert.strictEqual(delimiter.filter({ key: 'a' }), FILTER_SKIP);
assert.strictEqual(delimiter.NextMarker, 'b');
assert.strictEqual(delimiter.prvKey, undefined);
assert.deepStrictEqual(delimiter.result(), EmptyResult);
});
it('should accept a master version', () => {
const delimiter = new DelimiterMaster({}, fakeLogger);
const key = 'key';
const value = '';
assert.strictEqual(delimiter.filter({ key, value }), FILTER_ACCEPT);
assert.strictEqual(delimiter.prvKey, key);
assert.strictEqual(delimiter.NextMarker, key);
assert.deepStrictEqual(delimiter.result(), {
CommonPrefixes: [],
Contents: [{ key, value }],
IsTruncated: false,
NextMarker: undefined,
Delimiter: undefined,
}); });
});
it('should accept a PHD version as first input', () => { it('should return <key><VersionIdSeparator> for DelimiterMaster when ' +
const delimiter = new DelimiterMaster({}, fakeLogger); 'NextMarker is set and there is a delimiter', () => {
const keyPHD = 'keyPHD'; const key = 'key';
const objPHD = { const delimiter = new DelimiterMaster({ delimiter: '/', marker: key },
key: keyPHD, fakeLogger, vFormat);
value: Version.generatePHDVersion(generateVersionId('', '')),
};
/* When filtered, it should return FILTER_ACCEPT and set the prvKey. /* Filter a master version to set NextMarker. */
* to undefined. It should not be added to result the content or common // TODO: useless once S3C-1628 is fixed.
* prefixes. */ const listingKey = getListingKey(key, vFormat);
assert.strictEqual(delimiter.filter(objPHD), FILTER_ACCEPT); delimiter.filter({ key: listingKey, value: '' });
assert.strictEqual(delimiter.prvKey, undefined); assert.strictEqual(delimiter.NextMarker, key);
assert.strictEqual(delimiter.NextMarker, undefined);
assert.deepStrictEqual(delimiter.result(), EmptyResult);
});
it('should accept a PHD version', () => { /* With a delimiter skipping should return previous key + VID_SEP
const delimiter = new DelimiterMaster({}, fakeLogger); * (except when a delimiter is set and the NextMarker ends with the
const key = 'keyA'; * delimiter) . */
const value = ''; assert.strictEqual(delimiter.skipping(), listingKey + VID_SEP);
const keyPHD = 'keyBPHD';
const objPHD = {
key: keyPHD,
value: Version.generatePHDVersion(generateVersionId('', '')),
};
/* Filter a master version to set the NextMarker, the prvKey and add
* and element in result content. */
delimiter.filter({ key, value });
/* When filtered, it should return FILTER_ACCEPT and set the prvKey.
* to undefined. It should not be added to the result content or common
* prefixes. */
assert.strictEqual(delimiter.filter(objPHD), FILTER_ACCEPT);
assert.strictEqual(delimiter.prvKey, undefined);
assert.strictEqual(delimiter.NextMarker, key);
assert.deepStrictEqual(delimiter.result(), {
CommonPrefixes: [],
Contents: [{ key, value }],
IsTruncated: false,
NextMarker: undefined,
Delimiter: undefined,
}); });
});
it('should accept a version after a PHD', () => { it('should return <key><VersionIdSeparator> for DelimiterMaster when ' +
const delimiter = new DelimiterMaster({}, fakeLogger); 'NextContinuationToken is set and there is a delimiter', () => {
const masterKey = 'key'; const key = 'key';
const keyVersion = `${masterKey}${VID_SEP}version`; const delimiter = new DelimiterMaster(
const value = ''; { delimiter: '/', startAfter: key, v2: true },
const objPHD = { fakeLogger, vFormat);
key: masterKey,
value: Version.generatePHDVersion(generateVersionId('', '')),
};
/* Filter the PHD object. */ // Filter a master version to set NextContinuationToken
delimiter.filter(objPHD); const listingKey = getListingKey(key, vFormat);
delimiter.filter({ key: listingKey, value: '' });
assert.strictEqual(delimiter.NextContinuationToken, key);
/* The filtering of the PHD object has no impact, the version is assert.strictEqual(delimiter.skipping(), listingKey + VID_SEP);
* accepted and added to the result. */
assert.strictEqual(delimiter.filter({
key: keyVersion,
value,
}), FILTER_ACCEPT);
assert.strictEqual(delimiter.prvKey, masterKey);
assert.strictEqual(delimiter.NextMarker, masterKey);
assert.deepStrictEqual(delimiter.result(), {
CommonPrefixes: [],
Contents: [{ key: masterKey, value }],
IsTruncated: false,
NextMarker: undefined,
Delimiter: undefined,
}); });
});
it('should accept a delete marker', () => { it('should return NextMarker for DelimiterMaster when NextMarker is set' +
const delimiter = new DelimiterMaster({}, fakeLogger); ', there is a delimiter and the key ends with the delimiter', () => {
const version = new Version({ isDeleteMarker: true }); const delimiterChar = '/';
const key = 'key'; const keyWithEndingDelimiter = `key${delimiterChar}`;
const obj = { const delimiter = new DelimiterMaster({
key: `${key}${VID_SEP}version`, delimiter: delimiterChar,
value: version.toString(), marker: keyWithEndingDelimiter,
}; }, fakeLogger, vFormat);
/* When filtered, it should return FILTER_SKIP and set the prvKey. It /* When a delimiter is set and the NextMarker ends with the
* should not be added to the result content or common prefixes. */ * delimiter it should return the next marker value. */
assert.strictEqual(delimiter.filter(obj), FILTER_SKIP); assert.strictEqual(delimiter.NextMarker, keyWithEndingDelimiter);
assert.strictEqual(delimiter.NextMarker, undefined); const skipKey = vFormat === 'v1' ?
assert.strictEqual(delimiter.prvKey, key); `${DbPrefixes.Master}${keyWithEndingDelimiter}` :
assert.deepStrictEqual(delimiter.result(), EmptyResult); keyWithEndingDelimiter;
}); assert.strictEqual(delimiter.skipping(), skipKey);
it('should skip version after a delete marker', () => {
const delimiter = new DelimiterMaster({}, fakeLogger);
const version = new Version({ isDeleteMarker: true });
const key = 'key';
const versionKey = `${key}${VID_SEP}version`;
delimiter.filter({ key, value: version.toString() });
assert.strictEqual(delimiter.filter({
key: versionKey,
value: 'value',
}), FILTER_SKIP);
assert.strictEqual(delimiter.NextMarker, undefined);
assert.strictEqual(delimiter.prvKey, key);
assert.deepStrictEqual(delimiter.result(), EmptyResult);
});
it('should accept a new key after a delete marker', () => {
const delimiter = new DelimiterMaster({}, fakeLogger);
const version = new Version({ isDeleteMarker: true });
const key1 = 'key1';
const key2 = 'key2';
const value = 'value';
delimiter.filter({ key: key1, value: version.toString() });
assert.strictEqual(delimiter.filter({
key: key2,
value: 'value',
}), FILTER_ACCEPT);
assert.strictEqual(delimiter.NextMarker, key2);
assert.strictEqual(delimiter.prvKey, key2);
assert.deepStrictEqual(delimiter.result(), {
CommonPrefixes: [],
Contents: [{ key: key2, value }],
IsTruncated: false,
NextMarker: undefined,
Delimiter: undefined,
}); });
});
it('should accept the master version and skip the other ones', () => { it('should skip entries not starting with prefix', () => {
const delimiter = new DelimiterMaster({}, fakeLogger); const delimiter = new DelimiterMaster({ prefix: 'prefix' }, fakeLogger, vFormat);
const masterKey = 'key';
const masterValue = 'value';
const versionKey = `${masterKey}${VID_SEP}version`;
const versionValue = 'versionvalue';
/* Filter the master version. */ const listingKey = getListingKey('wrong', vFormat);
delimiter.filter({ key: masterKey, value: masterValue }); assert.strictEqual(delimiter.filter({ key: listingKey }), FILTER_SKIP);
assert.strictEqual(delimiter.NextMarker, undefined);
/* Version is skipped, not added to the result. The delimiter assert.strictEqual(delimiter.prvKey, undefined);
* NextMarker and prvKey value are unmodified and set to the assert.deepStrictEqual(delimiter.result(), EmptyResult);
* masterKey. */
assert.strictEqual(delimiter.filter({
key: versionKey,
value: versionValue,
}), FILTER_SKIP);
assert.strictEqual(delimiter.NextMarker, masterKey);
assert.strictEqual(delimiter.prvKey, masterKey);
assert.deepStrictEqual(delimiter.result(), {
CommonPrefixes: [],
Contents: [{ key: masterKey, value: masterValue }],
IsTruncated: false,
NextMarker: undefined,
Delimiter: undefined,
}); });
});
it('should return good listing result for version', () => { it('should skip entries inferior to next marker', () => {
const delimiter = new DelimiterMaster({}, fakeLogger); const delimiter = new DelimiterMaster({ marker: 'b' }, fakeLogger, vFormat);
const masterKey = 'key';
const versionKey1 = `${masterKey}${VID_SEP}version1`;
const versionKey2 = `${masterKey}${VID_SEP}version2`;
const value2 = 'value2';
/* Filter the PHD version. */ const listingKey = getListingKey('a', vFormat);
assert.strictEqual(delimiter.filter({ assert.strictEqual(delimiter.filter({ key: listingKey }), FILTER_SKIP);
key: masterKey, assert.strictEqual(delimiter.NextMarker, 'b');
value: '{ "isPHD": true, "value": "version" }', assert.strictEqual(delimiter.prvKey, undefined);
}), FILTER_ACCEPT); assert.deepStrictEqual(delimiter.result(), EmptyResult);
/* Filter a delete marker version. */
assert.strictEqual(delimiter.filter({
key: versionKey1,
value: '{ "isDeleteMarker": true }',
}), FILTER_ACCEPT);
/* Filter a last version with a specific value. */
assert.strictEqual(delimiter.filter({
key: versionKey2,
value: value2,
}), FILTER_ACCEPT);
assert.deepStrictEqual(delimiter.result(), {
CommonPrefixes: [],
Contents: [{ key: masterKey, value: value2 }],
IsTruncated: false,
NextMarker: undefined,
Delimiter: undefined,
}); });
});
it('should return good values for entries with different common prefixes', it('should accept a master version', () => {
() => { const delimiter = new DelimiterMaster({}, fakeLogger, vFormat);
const delimiterChar = '/'; const key = 'key';
const commonPrefix1 = `commonPrefix1${delimiterChar}`; const value = '';
const commonPrefix2 = `commonPrefix2${delimiterChar}`;
const prefix1Key1 = `${commonPrefix1}key1`;
const prefix1Key2 = `${commonPrefix1}key2`;
const prefix2Key1 = `${commonPrefix2}key1`;
const value = 'value';
const delimiter = new DelimiterMaster({ delimiter: delimiterChar }, const listingKey = getListingKey(key, vFormat);
fakeLogger); assert.strictEqual(delimiter.filter({ key: listingKey, value }), FILTER_ACCEPT);
if (vFormat === 'v0') {
/* Filter the first entry with a common prefix. It should be assert.strictEqual(delimiter.prvKey, key);
* accepted and added to the result. */ }
assert.strictEqual(delimiter.filter({ key: prefix1Key1, value }), assert.strictEqual(delimiter.NextMarker, key);
FILTER_ACCEPT); assert.deepStrictEqual(delimiter.result(), {
assert.deepStrictEqual(delimiter.result(), { CommonPrefixes: [],
CommonPrefixes: [commonPrefix1], Contents: [{ key, value }],
Contents: [], IsTruncated: false,
IsTruncated: false, NextMarker: undefined,
NextMarker: undefined, Delimiter: undefined,
Delimiter: delimiterChar, });
});
/* Filter the second entry with the same common prefix than the
* first entry. It should be skipped and not added to the result. */
assert.strictEqual(delimiter.filter({ key: prefix1Key2, value }),
FILTER_SKIP);
assert.deepStrictEqual(delimiter.result(), {
CommonPrefixes: [commonPrefix1],
Contents: [],
IsTruncated: false,
NextMarker: undefined,
Delimiter: delimiterChar,
});
/* Filter an entry with a new common prefix. It should be accepted
* and not added to the result. */
assert.strictEqual(delimiter.filter({ key: prefix2Key1, value }),
FILTER_ACCEPT);
assert.deepStrictEqual(delimiter.result(), {
CommonPrefixes: [commonPrefix1, commonPrefix2],
Contents: [],
IsTruncated: false,
NextMarker: undefined,
Delimiter: delimiterChar,
});
});
/* We test here the internal management of the prvKey field of the
* DelimiterMaster class, in particular once it has been set to an entry
* key before to finally skip this entry because of an already present
* common prefix. */
it('should accept a version after skipping an object because of its ' +
'commonPrefix', () => {
const delimiterChar = '/';
const commonPrefix1 = `commonPrefix1${delimiterChar}`;
const commonPrefix2 = `commonPrefix2${delimiterChar}`;
const prefix1Key1 = `${commonPrefix1}key1`;
const prefix1Key2 = `${commonPrefix1}key2`;
const prefix2VersionKey1 = `${commonPrefix2}key1${VID_SEP}version`;
const value = 'value';
const delimiter = new DelimiterMaster({ delimiter: delimiterChar },
fakeLogger);
/* Filter the two first entries with the same common prefix to add
* it to the result and reach the state where an entry is skipped
* because of an already present common prefix in the result. */
delimiter.filter({ key: prefix1Key1, value });
delimiter.filter({ key: prefix1Key2, value });
/* Filter an object with a key containing a version part and a new
* common prefix. It should be accepted and the new common prefix
* added to the result. */
assert.strictEqual(delimiter.filter({
key: prefix2VersionKey1,
value,
}), FILTER_ACCEPT);
assert.deepStrictEqual(delimiter.result(), {
CommonPrefixes: [commonPrefix1, commonPrefix2],
Contents: [],
IsTruncated: false,
NextMarker: undefined,
Delimiter: delimiterChar,
}); });
});
it('should skip a versioned entry when there is a delimiter and the key ' + it('should return good values for entries with different common prefixes', () => {
'starts with the NextMarker value', () => { const delimiterChar = '/';
const delimiterChar = '/'; const commonPrefix1 = `commonPrefix1${delimiterChar}`;
const commonPrefix = `commonPrefix${delimiterChar}`; const commonPrefix2 = `commonPrefix2${delimiterChar}`;
const key = `${commonPrefix}key${VID_SEP}version`; const prefix1Key1 = `${commonPrefix1}key1`;
const value = 'value'; const prefix1Key2 = `${commonPrefix1}key2`;
const prefix2Key1 = `${commonPrefix2}key1`;
const value = 'value';
const delimiter = new DelimiterMaster({ delimiter: delimiterChar }, const delimiter = new DelimiterMaster({ delimiter: delimiterChar },
fakeLogger); fakeLogger, vFormat);
/* TODO: should be set to a whole key instead of just a common prefix
* once ZENKO-1048 is fixed. */
delimiter.NextMarker = commonPrefix;
assert.strictEqual(delimiter.filter({ key, value }), FILTER_SKIP); /* Filter the first entry with a common prefix. It should be
* accepted and added to the result. */
assert.strictEqual(delimiter.filter({
key: getListingKey(prefix1Key1, vFormat),
value,
}),
FILTER_ACCEPT);
assert.deepStrictEqual(delimiter.result(), {
CommonPrefixes: [commonPrefix1],
Contents: [],
IsTruncated: false,
NextMarker: undefined,
Delimiter: delimiterChar,
});
/* Filter the second entry with the same common prefix than the
* first entry. It should be skipped and not added to the result. */
assert.strictEqual(delimiter.filter({
key: getListingKey(prefix1Key2, vFormat),
value,
}),
FILTER_SKIP);
assert.deepStrictEqual(delimiter.result(), {
CommonPrefixes: [commonPrefix1],
Contents: [],
IsTruncated: false,
NextMarker: undefined,
Delimiter: delimiterChar,
});
/* Filter an entry with a new common prefix. It should be accepted
* and not added to the result. */
assert.strictEqual(delimiter.filter({
key: getListingKey(prefix2Key1, vFormat),
value,
}),
FILTER_ACCEPT);
assert.deepStrictEqual(delimiter.result(), {
CommonPrefixes: [commonPrefix1, commonPrefix2],
Contents: [],
IsTruncated: false,
NextMarker: undefined,
Delimiter: delimiterChar,
});
});
if (vFormat === 'v0') {
it('should accept a PHD version as first input', () => {
const delimiter = new DelimiterMaster({}, fakeLogger, vFormat);
const keyPHD = 'keyPHD';
const objPHD = {
key: keyPHD,
value: Version.generatePHDVersion(generateVersionId('', '')),
};
/* When filtered, it should return FILTER_ACCEPT and set the prvKey
* to undefined. It should not be added to the result content or common
* prefixes. */
assert.strictEqual(delimiter.filter(objPHD), FILTER_ACCEPT);
assert.strictEqual(delimiter.prvKey, undefined);
assert.strictEqual(delimiter.NextMarker, undefined);
assert.deepStrictEqual(delimiter.result(), EmptyResult);
});
it('should accept a PHD version', () => {
const delimiter = new DelimiterMaster({}, fakeLogger, vFormat);
const key = 'keyA';
const value = '';
const keyPHD = 'keyBPHD';
const objPHD = {
key: keyPHD,
value: Version.generatePHDVersion(generateVersionId('', '')),
};
/* Filter a master version to set the NextMarker, the prvKey and add
* and element in result content. */
delimiter.filter({ key, value });
/* When filtered, it should return FILTER_ACCEPT and set the prvKey
* to undefined. It should not be added to the result content or common
* prefixes. */
assert.strictEqual(delimiter.filter(objPHD), FILTER_ACCEPT);
assert.strictEqual(delimiter.prvKey, undefined);
assert.strictEqual(delimiter.NextMarker, key);
assert.deepStrictEqual(delimiter.result(), {
CommonPrefixes: [],
Contents: [{ key, value }],
IsTruncated: false,
NextMarker: undefined,
Delimiter: undefined,
});
});
it('should accept a version after a PHD', () => {
const delimiter = new DelimiterMaster({}, fakeLogger, vFormat);
const masterKey = 'key';
const keyVersion = `${masterKey}${VID_SEP}version`;
const value = '';
const objPHD = {
key: masterKey,
value: Version.generatePHDVersion(generateVersionId('', '')),
};
/* Filter the PHD object. */
delimiter.filter(objPHD);
/* The filtering of the PHD object has no impact, the version is
* accepted and added to the result. */
assert.strictEqual(delimiter.filter({
key: keyVersion,
value,
}), FILTER_ACCEPT);
assert.strictEqual(delimiter.prvKey, masterKey);
assert.strictEqual(delimiter.NextMarker, masterKey);
assert.deepStrictEqual(delimiter.result(), {
CommonPrefixes: [],
Contents: [{ key: masterKey, value }],
IsTruncated: false,
NextMarker: undefined,
Delimiter: undefined,
});
});
it('should accept a delete marker', () => {
const delimiter = new DelimiterMaster({}, fakeLogger, vFormat);
const version = new Version({ isDeleteMarker: true });
const key = 'key';
const obj = {
key: `${key}${VID_SEP}version`,
value: version.toString(),
};
/* When filtered, it should return FILTER_SKIP and set the prvKey. It
* should not be added to the result content or common prefixes. */
assert.strictEqual(delimiter.filter(obj), FILTER_SKIP);
assert.strictEqual(delimiter.NextMarker, undefined);
assert.strictEqual(delimiter.prvKey, key);
assert.deepStrictEqual(delimiter.result(), EmptyResult);
});
it('should skip version after a delete marker', () => {
const delimiter = new DelimiterMaster({}, fakeLogger, vFormat);
const version = new Version({ isDeleteMarker: true });
const key = 'key';
const versionKey = `${key}${VID_SEP}version`;
delimiter.filter({ key, value: version.toString() });
assert.strictEqual(delimiter.filter({
key: versionKey,
value: 'value',
}), FILTER_SKIP);
assert.strictEqual(delimiter.NextMarker, undefined);
assert.strictEqual(delimiter.prvKey, key);
assert.deepStrictEqual(delimiter.result(), EmptyResult);
});
it('should accept a new key after a delete marker', () => {
const delimiter = new DelimiterMaster({}, fakeLogger, vFormat);
const version = new Version({ isDeleteMarker: true });
const key1 = 'key1';
const key2 = 'key2';
const value = 'value';
delimiter.filter({ key: key1, value: version.toString() });
assert.strictEqual(delimiter.filter({
key: key2,
value: 'value',
}), FILTER_ACCEPT);
assert.strictEqual(delimiter.NextMarker, key2);
assert.strictEqual(delimiter.prvKey, key2);
assert.deepStrictEqual(delimiter.result(), {
CommonPrefixes: [],
Contents: [{ key: key2, value }],
IsTruncated: false,
NextMarker: undefined,
Delimiter: undefined,
});
});
it('should accept the master version and skip the other ones', () => {
const delimiter = new DelimiterMaster({}, fakeLogger, vFormat);
const masterKey = 'key';
const masterValue = 'value';
const versionKey = `${masterKey}${VID_SEP}version`;
const versionValue = 'versionvalue';
/* Filter the master version. */
delimiter.filter({ key: masterKey, value: masterValue });
/* Version is skipped, not added to the result. The delimiter
* NextMarker and prvKey value are unmodified and set to the
* masterKey. */
assert.strictEqual(delimiter.filter({
key: versionKey,
value: versionValue,
}), FILTER_SKIP);
assert.strictEqual(delimiter.NextMarker, masterKey);
assert.strictEqual(delimiter.prvKey, masterKey);
assert.deepStrictEqual(delimiter.result(), {
CommonPrefixes: [],
Contents: [{ key: masterKey, value: masterValue }],
IsTruncated: false,
NextMarker: undefined,
Delimiter: undefined,
});
});
it('should return good listing result for version', () => {
const delimiter = new DelimiterMaster({}, fakeLogger, vFormat);
const masterKey = 'key';
const versionKey1 = `${masterKey}${VID_SEP}version1`;
const versionKey2 = `${masterKey}${VID_SEP}version2`;
const value2 = 'value2';
/* Filter the PHD version. */
assert.strictEqual(delimiter.filter({
key: masterKey,
value: '{ "isPHD": true, "value": "version" }',
}), FILTER_ACCEPT);
/* Filter a delete marker version. */
assert.strictEqual(delimiter.filter({
key: versionKey1,
value: '{ "isDeleteMarker": true }',
}), FILTER_ACCEPT);
/* Filter a last version with a specific value. */
assert.strictEqual(delimiter.filter({
key: versionKey2,
value: value2,
}), FILTER_ACCEPT);
assert.deepStrictEqual(delimiter.result(), {
CommonPrefixes: [],
Contents: [{ key: masterKey, value: value2 }],
IsTruncated: false,
NextMarker: undefined,
Delimiter: undefined,
});
});
/* We test here the internal management of the prvKey field of the
* DelimiterMaster class, in particular once it has been set to an entry
* key before this entry is finally skipped because of an already present
* common prefix. */
it('should accept a version after skipping an object because of its commonPrefix', () => {
const delimiterChar = '/';
const commonPrefix1 = `commonPrefix1${delimiterChar}`;
const commonPrefix2 = `commonPrefix2${delimiterChar}`;
const prefix1Key1 = `${commonPrefix1}key1`;
const prefix1Key2 = `${commonPrefix1}key2`;
const prefix2VersionKey1 = `${commonPrefix2}key1${VID_SEP}version`;
const value = 'value';
const delimiter = new DelimiterMaster({ delimiter: delimiterChar },
fakeLogger, vFormat);
/* Filter the two first entries with the same common prefix to add
* it to the result and reach the state where an entry is skipped
* because of an already present common prefix in the result. */
delimiter.filter({ key: prefix1Key1, value });
delimiter.filter({ key: prefix1Key2, value });
/* Filter an object with a key containing a version part and a new
* common prefix. It should be accepted and the new common prefix
* added to the result. */
assert.strictEqual(delimiter.filter({
key: prefix2VersionKey1,
value,
}), FILTER_ACCEPT);
assert.deepStrictEqual(delimiter.result(), {
CommonPrefixes: [commonPrefix1, commonPrefix2],
Contents: [],
IsTruncated: false,
NextMarker: undefined,
Delimiter: delimiterChar,
});
});
it('should skip a versioned entry when there is a delimiter and the key ' +
'starts with the NextMarker value', () => {
const delimiterChar = '/';
const commonPrefix = `commonPrefix${delimiterChar}`;
const key = `${commonPrefix}key${VID_SEP}version`;
const value = 'value';
const delimiter = new DelimiterMaster({ delimiter: delimiterChar },
fakeLogger, vFormat);
/* TODO: should be set to a whole key instead of just a common prefix
* once ZENKO-1048 is fixed. */
delimiter.NextMarker = commonPrefix;
assert.strictEqual(delimiter.filter({ key, value }), FILTER_SKIP);
});
}
}); });
}); });


@ -95,10 +95,10 @@ describe('listingParamsMasterKeysV0ToV1', () => {
}, },
}, },
]; ];
testCases.forEach(testCase => { testCases.forEach(({ v0params, v1params }) => {
it(`${JSON.stringify(testCase.v0params)} => ${JSON.stringify(testCase.v1params)}`, () => { it(`${JSON.stringify(v0params)} => ${JSON.stringify(v1params)}`, () => {
const converted = listingParamsMasterKeysV0ToV1(testCase.v0params); const converted = listingParamsMasterKeysV0ToV1(v0params);
assert.deepStrictEqual(converted, testCase.v1params); assert.deepStrictEqual(converted, v1params);
}); });
}); });
}); });