Compare commits
4 Commits: developmen... → user/jonat...

| Author | SHA1 | Date |
|---|---|---|
| Jonathan Gramain | d5844dfea4 | |
| Jonathan Gramain | 7f98c7b543 | |
| Jonathan Gramain | 87bdac0391 | |
| Jonathan Gramain | 94c10e4383 | |
index.js (1 change)

@@ -28,6 +28,7 @@ module.exports = {
             LRUCache: require('./lib/algos/cache/LRUCache'),
         },
         stream: {
+            SerialStream: require('./lib/algos/stream/SerialStream'),
             MergeStream: require('./lib/algos/stream/MergeStream'),
         },
     },
@@ -1,6 +1,6 @@
 'use strict'; // eslint-disable-line strict

-const { inc, checkLimit, listingParamsMasterKeysV0ToV1,
+const { inc, checkLimit, listingParamsMasterKeysV0ToV1, listingParamsV0ToV0Mig,
     FILTER_END, FILTER_ACCEPT } = require('./tools');
 const DEFAULT_MAX_KEYS = 1000;
 const VSConst = require('../../versioning/constants').VersioningConstants;

@@ -44,6 +44,18 @@ class MultipartUploads {
                 genMDParams: this.genMDParamsV0,
                 getObjectKey: this.getObjectKeyV0,
             },
+            [BucketVersioningKeyFormat.v0mig]: {
+                genMDParams: this.genMDParamsV0Mig,
+                getObjectKey: this.getObjectKeyV0,
+            },
+            [BucketVersioningKeyFormat.v0v1]: {
+                genMDParams: this.genMDParamsV1,
+                getObjectKey: this.getObjectKeyV1,
+            },
+            [BucketVersioningKeyFormat.v1mig]: {
+                genMDParams: this.genMDParamsV1,
+                getObjectKey: this.getObjectKeyV1,
+            },
             [BucketVersioningKeyFormat.v1]: {
                 genMDParams: this.genMDParamsV1,
                 getObjectKey: this.getObjectKeyV1,

@@ -73,6 +85,11 @@ class MultipartUploads {
         return params;
     }

+    genMDParamsV0Mig() {
+        const v0params = this.genMDParamsV0();
+        return listingParamsV0ToV0Mig(v0params);
+    }
+
     genMDParamsV1() {
         const v0params = this.genMDParamsV0();
         return listingParamsMasterKeysV0ToV1(v0params);
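For context: the hunks above extend a dispatch table keyed by `BucketVersioningKeyFormat`, so each listing class picks its `genMDParams`/`getObjectKey` variants once, per key format. A standalone sketch of that pattern, with a hypothetical `Lister` class and stub return values (only the table shape mirrors the real code):

```js
// Sketch of the per-format dispatch pattern used in the diff above.
// `Lister` and the stub values are hypothetical stand-ins, not Arsenal code.
const BucketVersioningKeyFormat = {
    v0: 'v0', v0mig: 'v0mig', v0v1: 'v0v1', v1mig: 'v1mig', v1: 'v1',
};

class Lister {
    constructor(vFormat) {
        // bind the chosen variant onto the instance once, at construction time
        Object.assign(this, {
            [BucketVersioningKeyFormat.v0]: { genMDParams: this.genMDParamsV0 },
            [BucketVersioningKeyFormat.v0mig]: { genMDParams: this.genMDParamsV0Mig },
            [BucketVersioningKeyFormat.v0v1]: { genMDParams: this.genMDParamsV1 },
            [BucketVersioningKeyFormat.v1mig]: { genMDParams: this.genMDParamsV1 },
            [BucketVersioningKeyFormat.v1]: { genMDParams: this.genMDParamsV1 },
        }[vFormat]);
    }
    genMDParamsV0() { return {}; }
    genMDParamsV0Mig() { return [{ lt: '\x7f' }, { gte: '\x80', serial: true }]; }
    genMDParamsV1() { return { gte: '\x7fM', lt: '\x7fN' }; }
}

console.log(new Lister(BucketVersioningKeyFormat.v0mig).genMDParams());
// -> the two v0mig ranges (see the tools.js hunk further down)
```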
@@ -1,7 +1,7 @@
 'use strict'; // eslint-disable-line strict

 const Extension = require('./Extension').default;
-const { inc, listingParamsMasterKeysV0ToV1,
+const { inc, listingParamsMasterKeysV0ToV1, listingParamsV0ToV0Mig,
     FILTER_END, FILTER_ACCEPT, FILTER_SKIP } = require('./tools');
 const VSConst = require('../../versioning/constants').VersioningConstants;
 const { DbPrefixes, BucketVersioningKeyFormat } = VSConst;

@@ -99,6 +99,21 @@ class Delimiter extends Extension {
                 getObjectKey: this.getObjectKeyV0,
                 skipping: this.skippingV0,
             },
+            [BucketVersioningKeyFormat.v0mig]: {
+                genMDParams: this.genMDParamsV0Mig,
+                getObjectKey: this.getObjectKeyV0,
+                skipping: this.skippingV0,
+            },
+            [BucketVersioningKeyFormat.v0v1]: {
+                genMDParams: this.genMDParamsV1,
+                getObjectKey: this.getObjectKeyV1,
+                skipping: this.skippingV1,
+            },
+            [BucketVersioningKeyFormat.v1mig]: {
+                genMDParams: this.genMDParamsV1,
+                getObjectKey: this.getObjectKeyV1,
+                skipping: this.skippingV1,
+            },
             [BucketVersioningKeyFormat.v1]: {
                 genMDParams: this.genMDParamsV1,
                 getObjectKey: this.getObjectKeyV1,

@@ -124,6 +139,11 @@ class Delimiter extends Extension {
         return params;
     }

+    genMDParamsV0Mig() {
+        const v0params = this.genMDParamsV0();
+        return listingParamsV0ToV0Mig(v0params);
+    }
+
     genMDParamsV1() {
         const params = this.genMDParamsV0();
         return listingParamsMasterKeysV0ToV1(params);
@@ -38,6 +38,18 @@ class DelimiterMaster extends Delimiter {
                 filter: this.filterV0,
                 skipping: this.skippingV0,
             },
+            [BucketVersioningKeyFormat.v0mig]: {
+                filter: this.filterV0,
+                skipping: this.skippingV0,
+            },
+            [BucketVersioningKeyFormat.v0v1]: {
+                filter: this.filterV1,
+                skipping: this.skippingV1,
+            },
+            [BucketVersioningKeyFormat.v1mig]: {
+                filter: this.filterV1,
+                skipping: this.skippingV1,
+            },
             [BucketVersioningKeyFormat.v1]: {
                 filter: this.filterV1,
                 skipping: this.skippingV1,
@@ -3,8 +3,8 @@
 const Delimiter = require('./delimiter').Delimiter;
 const Version = require('../../versioning/Version').Version;
 const VSConst = require('../../versioning/constants').VersioningConstants;
-const { inc, FILTER_END, FILTER_ACCEPT, FILTER_SKIP, SKIP_NONE } =
-    require('./tools');
+const { inc, listingParamsV0ToV0Mig,
+    FILTER_END, FILTER_ACCEPT, FILTER_SKIP, SKIP_NONE } = require('./tools');

 const VID_SEP = VSConst.VersionId.Separator;
 const { DbPrefixes, BucketVersioningKeyFormat } = VSConst;

@@ -40,6 +40,21 @@ class DelimiterVersions extends Delimiter {
                 filter: this.filterV0,
                 skipping: this.skippingV0,
             },
+            [BucketVersioningKeyFormat.v0mig]: {
+                genMDParams: this.genMDParamsV0Mig,
+                filter: this.filterV0,
+                skipping: this.skippingV0,
+            },
+            [BucketVersioningKeyFormat.v0v1]: {
+                genMDParams: this.genMDParamsV1,
+                filter: this.filterV1,
+                skipping: this.skippingV1,
+            },
+            [BucketVersioningKeyFormat.v1mig]: {
+                genMDParams: this.genMDParamsV1,
+                filter: this.filterV1,
+                skipping: this.skippingV1,
+            },
             [BucketVersioningKeyFormat.v1]: {
                 genMDParams: this.genMDParamsV1,
                 filter: this.filterV1,

@@ -72,6 +87,11 @@ class DelimiterVersions extends Delimiter {
         return params;
     }

+    genMDParamsV0Mig() {
+        const v0params = this.genMDParamsV0();
+        return listingParamsV0ToV0Mig(v0params);
+    }
+
     genMDParamsV1() {
         // return an array of two listing params sets to ask for
         // synchronized listing of M and V ranges
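For reference, `genMDParamsV1` above returns two listing ranges (master keys and version keys) that are listed in a synchronized way. A minimal sketch of the shape of those ranges, using the `DbPrefixes` values from the constants hunk further down and a simplified stand-in for the `inc()` helper from the tools module (both are assumptions spelled out here for readability):

```js
// Sketch only: the shape of the two synchronized V1 ranges.
// DbPrefixes values come from the constants hunk below; inc() here is a
// simplified stand-in that returns the key range bound just after a prefix.
const DbPrefixes = { Master: '\x7fM', Version: '\x7fV' };
const inc = s => s.slice(0, -1) + String.fromCharCode(s.charCodeAt(s.length - 1) + 1);

const v1Params = [
    { gte: DbPrefixes.Master, lt: inc(DbPrefixes.Master) },   // all master keys
    { gte: DbPrefixes.Version, lt: inc(DbPrefixes.Version) }, // all version keys
];
console.log(v1Params); // matches the v1 expectation in the delimiterVersions test below
```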
@@ -59,10 +59,59 @@ function listingParamsMasterKeysV0ToV1(v0params) {
     return v1params;
 }

+function listingParamsV0ToV0Mig(v0params) {
+    if ((v0params.gt !== undefined && v0params.gt >= inc(DbPrefixes.V1))
+        || (v0params.gte !== undefined && v0params.gte >= inc(DbPrefixes.V1))
+        || (v0params.lt !== undefined && v0params.lt <= DbPrefixes.V1)) {
+        return v0params;
+    }
+    if ((v0params.gt !== undefined && v0params.gt >= DbPrefixes.V1)
+        || (v0params.gte !== undefined && v0params.gte >= DbPrefixes.V1)) {
+        const v0migParams = Object.assign({}, v0params);
+        let greaterParam;
+        if (v0params.gt !== undefined) {
+            v0migParams.gt = inc(DbPrefixes.V1);
+            greaterParam = v0migParams.gt;
+        }
+        if (v0params.gte !== undefined) {
+            v0migParams.gte = inc(DbPrefixes.V1);
+            greaterParam = v0migParams.gte;
+        }
+        if (v0params.lt !== undefined && greaterParam !== undefined
+            && v0params.lt <= greaterParam) {
+            // we annihilated the valid range during our v0mig
+            // transform to skip V1 prefix: return an empty range with
+            // a trick instead of an invalid combo
+            return { lt: '' };
+        }
+        return v0migParams;
+    }
+    const rangeParams1 = {
+        lt: DbPrefixes.V1,
+    };
+    if (v0params.gt !== undefined) {
+        rangeParams1.gt = v0params.gt;
+    }
+    if (v0params.gte !== undefined) {
+        rangeParams1.gte = v0params.gte;
+    }
+    const rangeParams2 = {
+        gte: inc(DbPrefixes.V1),
+        // tell RepdServer._listObject() that the second listing is to
+        // be done after the first, not in parallel
+        serial: true,
+    };
+    if (v0params.lt !== undefined) {
+        rangeParams2.lt = v0params.lt;
+    }
+    return [rangeParams1, rangeParams2];
+}
+
 module.exports = {
     checkLimit,
     inc,
     listingParamsMasterKeysV0ToV1,
+    listingParamsV0ToV0Mig,
     SKIP_NONE,
     FILTER_END,
     FILTER_SKIP,
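To make the v0mig transform above easier to follow, here are the input → output pairs that the unit tests later in this compare assert, written out with literal values (per the constants hunk below, `DbPrefixes.V1` is `'\x7f'`, and `inc('\x7f')` is `'\x80'`):

```js
// Not a test from the diff; just the listingParamsV0ToV0Mig expectations
// spelled out with literal prefix values for readability.
const expected = [
    // unbounded v0 listing -> two ranges that jump over the V1 prefix,
    // the second flagged `serial` so it is listed after the first
    { input: {}, output: [{ lt: '\x7f' }, { gte: '\x80', serial: true }] },
    // a range entirely below the V1 prefix is returned unchanged
    { input: { gte: 'foo/bar', lt: 'foo/bas' }, output: { gte: 'foo/bar', lt: 'foo/bas' } },
    // a range starting inside the V1 prefix is moved to just past it
    { input: { gt: '\x7ffoo' }, output: { gt: '\x80' } },
    // a range entirely inside the V1 prefix collapses to the empty-range trick
    { input: { gte: '\x7ffoo/', lt: '\x7ffoo0' }, output: { lt: '' } },
];
console.log(`${expected.length} representative cases`);
```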
@@ -0,0 +1,56 @@
+const stream = require('stream');
+
+class SerialStream extends stream.Readable {
+    constructor(stream1, stream2) {
+        super({ objectMode: true });
+
+        this._streams = [stream1, stream2];
+        this._currentStream = stream1;
+        this._streamToResume = null;
+
+        stream1.on('data', item => this._onItem(stream1, item));
+        stream1.once('end', () => this._onEndStream1());
+        stream1.once('error', err => this._onError(stream1, err));
+    }
+
+    _read() {
+        if (this._streamToResume) {
+            this._streamToResume.resume();
+            this._streamToResume = null;
+        }
+    }
+
+    _destroy(err, callback) {
+        this._currentStream.destroy();
+        if (this._currentStream === this._streams[0]) {
+            this._streams[1].destroy();
+        }
+        callback();
+    }
+
+    _onItem(myStream, item) {
+        if (!this.push(item)) {
+            myStream.pause();
+            this._streamToResume = myStream;
+        }
+    }
+
+    _onEndStream1() {
+        // stream1 is done, now move on with data from stream2
+        const stream2 = this._streams[1];
+        stream2.on('data', item => this._onItem(stream2, item));
+        stream2.once('end', () => this._onEnd());
+        stream2.once('error', err => this._onError(stream2, err));
+    }
+
+    _onEnd() {
+        this.push(null);
+    }
+
+    _onError(myStream, err) {
+        this.emit('error', err);
+        this._destroy(err, () => {});
+    }
+}
+
+module.exports = SerialStream;
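A minimal usage sketch for the new SerialStream class: it emits every item of the first stream before any item of the second, which is what the `serial: true` listing ranges rely on. The require path is an assumption based on the index.js hunk at the top of this compare.

```js
// Minimal sketch, not from the diff: chain two object-mode readables serially.
const stream = require('stream');
const SerialStream = require('./lib/algos/stream/SerialStream'); // assumed path

const first = stream.Readable.from([{ key: 'a' }, { key: 'b' }]);
const second = stream.Readable.from([{ key: 'z' }]);

new SerialStream(first, second)
    .on('data', item => console.log(item.key)) // prints a, b, z in order
    .on('end', () => console.log('done'));
```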
@@ -3,6 +3,7 @@ module.exports.VersioningConstants = {
         Separator: '\0',
     },
     DbPrefixes: {
+        V1: '\x7f',
         Master: '\x7fM',
         Version: '\x7fV',
     },
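The point of the new `V1: '\x7f'` constant is that it is a common prefix of both `Master` (`'\x7fM'`) and `Version` (`'\x7fV'`), so a single `[V1, inc(V1))` range covers every v1-format key, which is exactly the range the v0mig listing params skip. A quick check, assuming plain JavaScript string comparison mirrors the database key order:

```js
// Sanity check of the prefix ordering (assumption: lexicographic string
// comparison matches the DB's key order).
const V1 = '\x7f';
const incV1 = '\x80';           // key range bound just after the V1 prefix
const masterKey = '\x7fMfoo';   // a v1 master key
const versionKey = '\x7fVfoo';  // a v1 version key

console.log(V1 <= masterKey && masterKey < incV1);   // true
console.log(V1 <= versionKey && versionKey < incV1); // true
console.log('foo' < V1); // true: ordinary v0 keys sort below the V1 prefix
```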
@@ -1,14 +1,28 @@
 'use strict'; // eslint-disable-line strict

 const assert = require('assert');
-const MultipartUploads =
-    require('../../../../lib/algos/list/MPU').MultipartUploads;
+const MultipartUploads = require('../../../../lib/algos/list/MPU').MultipartUploads;
+const { inc } = require('../../../../lib/algos/list/tools');
 const werelogs = require('werelogs').Logger;
 // eslint-disable-next-line new-cap
 const logger = new werelogs('listMpuTest');
 const performListing = require('../../../utils/performListing');
 const VSConst = require('../../../../lib/versioning/constants').VersioningConstants;
-const { DbPrefixes } = VSConst;
+const { DbPrefixes, BucketVersioningKeyFormat } = VSConst;
+
+function getListingKey(key, vFormat) {
+    if ([BucketVersioningKeyFormat.v0,
+         BucketVersioningKeyFormat.v0mig].includes(vFormat)) {
+        return key;
+    }
+    if ([BucketVersioningKeyFormat.v0v1,
+         BucketVersioningKeyFormat.v1mig,
+         BucketVersioningKeyFormat.v1].includes(vFormat)) {
+        return `${DbPrefixes.Master}${key}`;
+    }
+    assert.fail(`bad vFormat ${vFormat}`);
+    return undefined;
+}

 describe('Multipart Uploads listing algorithm', () => {
     const splitter = '**';

@@ -16,22 +30,14 @@ describe('Multipart Uploads listing algorithm', () => {
     const storageClass = 'STANDARD';
     const initiator1 = { ID: '1', DisplayName: 'initiator1' };
     const initiator2 = { ID: '2', DisplayName: 'initiator2' };
-    const keys = {
-        v0: [`${overviewPrefix}test/1${splitter}uploadId1`,
+    const v0keys = [
+        `${overviewPrefix}test/1${splitter}uploadId1`,
         `${overviewPrefix}test/2${splitter}uploadId2`,
         `${overviewPrefix}test/3${splitter}uploadId3`,
         `${overviewPrefix}testMore/4${splitter}uploadId4`,
         `${overviewPrefix}testMore/5${splitter}uploadId5`,
         `${overviewPrefix}prefixTest/5${splitter}uploadId5`,
-        ],
-        v1: [`${DbPrefixes.Master}${overviewPrefix}test/1${splitter}uploadId1`,
-        `${DbPrefixes.Master}${overviewPrefix}test/2${splitter}uploadId2`,
-        `${DbPrefixes.Master}${overviewPrefix}test/3${splitter}uploadId3`,
-        `${DbPrefixes.Master}${overviewPrefix}testMore/4${splitter}uploadId4`,
-        `${DbPrefixes.Master}${overviewPrefix}testMore/5${splitter}uploadId5`,
-        `${DbPrefixes.Master}${overviewPrefix}prefixTest/5${splitter}uploadId5`,
-        ],
-    };
+    ];
     const values = [
         JSON.stringify({
             'key': 'test/1',

@@ -128,9 +134,15 @@ describe('Multipart Uploads listing algorithm', () => {
             done();
         });

-    ['v0', 'v1'].forEach(vFormat => {
-        const dbListing = keys[vFormat].map((key, i) => ({
-            key,
+    [
+        BucketVersioningKeyFormat.v0,
+        BucketVersioningKeyFormat.v0mig,
+        BucketVersioningKeyFormat.v0v1,
+        BucketVersioningKeyFormat.v1mig,
+        BucketVersioningKeyFormat.v1,
+    ].forEach(vFormat => {
+        const dbListing = v0keys.map((key, i) => ({
+            key: getListingKey(key, vFormat),
             value: values[i],
         }));
         it(`should perform a vFormat=${vFormat} listing of all keys`, () => {

@@ -171,4 +183,90 @@ describe('Multipart Uploads listing algorithm', () => {
             assert.deepStrictEqual(listingResult, expectedResult);
         });
     });
+
+    describe('MultipartUploads.genMDParams()', () => {
+        [{
+            listingParams: {
+                splitter,
+            },
+            mdParams: {
+                [BucketVersioningKeyFormat.v0]: {},
+                [BucketVersioningKeyFormat.v0mig]: [{
+                    lt: DbPrefixes.V1,
+                }, {
+                    gte: inc(DbPrefixes.V1),
+                    serial: true,
+                }],
+                [BucketVersioningKeyFormat.v1]: {
+                    gte: DbPrefixes.Master,
+                    lt: inc(DbPrefixes.Master),
+                },
+            },
+        }, {
+            listingParams: {
+                splitter,
+                prefix: 'foo/bar',
+            },
+            mdParams: {
+                [BucketVersioningKeyFormat.v0]: {
+                    gte: 'foo/bar',
+                    lt: 'foo/bas',
+                },
+                [BucketVersioningKeyFormat.v0mig]: {
+                    gte: 'foo/bar',
+                    lt: 'foo/bas',
+                },
+                [BucketVersioningKeyFormat.v1]: {
+                    gte: `${DbPrefixes.Master}foo/bar`,
+                    lt: `${DbPrefixes.Master}foo/bas`,
+                },
+            },
+        }, {
+            listingParams: {
+                splitter,
+                keyMarker: 'marker',
+            },
+            mdParams: {
+                [BucketVersioningKeyFormat.v0]: {
+                    gt: `${overviewPrefix}marker${inc(splitter)}`,
+                },
+                [BucketVersioningKeyFormat.v0mig]: [{
+                    gt: `${overviewPrefix}marker${inc(splitter)}`,
+                    lt: DbPrefixes.V1,
+                }, {
+                    gte: inc(DbPrefixes.V1),
+                    serial: true,
+                }],
+                [BucketVersioningKeyFormat.v1]: {
+                    gt: `${DbPrefixes.Master}${overviewPrefix}marker${inc(splitter)}`,
+                    lt: inc(DbPrefixes.Master),
+                },
+            },
+        }].forEach(testCase => {
+            [
+                BucketVersioningKeyFormat.v0,
+                BucketVersioningKeyFormat.v0mig,
+                BucketVersioningKeyFormat.v0v1,
+                BucketVersioningKeyFormat.v1mig,
+                BucketVersioningKeyFormat.v1,
+            ].forEach(vFormat => {
+                it(`with vFormat=${vFormat}, listing params ${JSON.stringify(testCase.listingParams)}`, () => {
+                    const delimiter = new MultipartUploads(
+                        testCase.listingParams, logger, vFormat);
+                    const mdParams = delimiter.genMDParams();
+                    let paramsVFormat;
+                    if ([BucketVersioningKeyFormat.v0v1,
+                         BucketVersioningKeyFormat.v1mig,
+                         BucketVersioningKeyFormat.v1].includes(vFormat)) {
+                        // all above vformats are equivalent to v1 when it
+                        // comes to generating md params
+                        paramsVFormat = BucketVersioningKeyFormat.v1;
+                    } else {
+                        paramsVFormat = vFormat;
+                    }
+                    assert.deepStrictEqual(mdParams, testCase.mdParams[paramsVFormat]);
+                });
+            });
+        });
+    });
 });
@@ -11,7 +11,7 @@ const performListing = require('../../../utils/performListing');
 const zpad = require('../../helpers').zpad;
 const { inc } = require('../../../../lib/algos/list/tools');
 const VSConst = require('../../../../lib/versioning/constants').VersioningConstants;
-const { DbPrefixes } = VSConst;
+const { DbPrefixes, BucketVersioningKeyFormat } = VSConst;

 class Test {
     constructor(name, input, genMDParams, output, filter) {

@@ -90,8 +90,14 @@ const receivedNonAlphaData = nonAlphabeticalData.map(

 const tests = [
     new Test('all elements', {}, {
-        v0: {},
-        v1: {
+        [BucketVersioningKeyFormat.v0]: {},
+        [BucketVersioningKeyFormat.v0mig]: [{
+            lt: DbPrefixes.V1,
+        }, {
+            gte: inc(DbPrefixes.V1),
+            serial: true,
+        }],
+        [BucketVersioningKeyFormat.v1]: {
             gte: DbPrefixes.Master,
             lt: inc(DbPrefixes.Master),
         },

@@ -105,10 +111,17 @@ const tests = [
     new Test('with valid marker', {
         marker: receivedData[4].key,
     }, {
-        v0: {
+        [BucketVersioningKeyFormat.v0]: {
             gt: receivedData[4].key,
         },
-        v1: {
+        [BucketVersioningKeyFormat.v0mig]: [{
+            gt: receivedData[4].key,
+            lt: DbPrefixes.V1,
+        }, {
+            gte: inc(DbPrefixes.V1),
+            serial: true,
+        }],
+        [BucketVersioningKeyFormat.v1]: {
             gt: `${DbPrefixes.Master}${receivedData[4].key}`,
             lt: inc(DbPrefixes.Master),
         },

@@ -129,10 +142,17 @@ const tests = [
         marker: 'zzzz',
         delimiter: '/',
     }, {
-        v0: {
+        [BucketVersioningKeyFormat.v0]: {
             gt: 'zzzz',
         },
-        v1: {
+        [BucketVersioningKeyFormat.v0mig]: [{
+            gt: 'zzzz',
+            lt: DbPrefixes.V1,
+        }, {
+            gte: inc(DbPrefixes.V1),
+            serial: true,
+        }],
+        [BucketVersioningKeyFormat.v1]: {
             gt: `${DbPrefixes.Master}zzzz`,
             lt: inc(DbPrefixes.Master),
         },

@@ -146,8 +166,14 @@ const tests = [
     new Test('with makKeys', {
         maxKeys: 3,
     }, {
-        v0: {},
-        v1: {
+        [BucketVersioningKeyFormat.v0]: {},
+        [BucketVersioningKeyFormat.v0mig]: [{
+            lt: DbPrefixes.V1,
+        }, {
+            gte: inc(DbPrefixes.V1),
+            serial: true,
+        }],
+        [BucketVersioningKeyFormat.v1]: {
             gte: DbPrefixes.Master,
             lt: inc(DbPrefixes.Master),
         },

@@ -161,8 +187,14 @@ const tests = [
     new Test('with big makKeys', {
         maxKeys: 15000,
     }, {
-        v0: {},
-        v1: {
+        [BucketVersioningKeyFormat.v0]: {},
+        [BucketVersioningKeyFormat.v0mig]: [{
+            lt: DbPrefixes.V1,
+        }, {
+            gte: inc(DbPrefixes.V1),
+            serial: true,
+        }],
+        [BucketVersioningKeyFormat.v1]: {
             gte: DbPrefixes.Master,
             lt: inc(DbPrefixes.Master),
         },

@@ -176,8 +208,14 @@ const tests = [
     new Test('with delimiter', {
         delimiter: '/',
     }, {
-        v0: {},
-        v1: {
+        [BucketVersioningKeyFormat.v0]: {},
+        [BucketVersioningKeyFormat.v0mig]: [{
+            lt: DbPrefixes.V1,
+        }, {
+            gte: inc(DbPrefixes.V1),
+            serial: true,
+        }],
+        [BucketVersioningKeyFormat.v1]: {
             gte: DbPrefixes.Master,
             lt: inc(DbPrefixes.Master),
         },

@@ -193,8 +231,14 @@ const tests = [
     new Test('with long delimiter', {
         delimiter: 'notes/summer',
     }, {
-        v0: {},
-        v1: {
+        [BucketVersioningKeyFormat.v0]: {},
+        [BucketVersioningKeyFormat.v0mig]: [{
+            lt: DbPrefixes.V1,
+        }, {
+            gte: inc(DbPrefixes.V1),
+            serial: true,
+        }],
+        [BucketVersioningKeyFormat.v1]: {
             gte: DbPrefixes.Master,
             lt: inc(DbPrefixes.Master),
         },
@@ -218,11 +262,15 @@ const tests = [
         prefix: 'notes/summer/',
         marker: 'notes/summer0',
     }, {
-        v0: {
+        [BucketVersioningKeyFormat.v0]: {
             gt: `notes/summer${inc('/')}`,
             lt: `notes/summer${inc('/')}`,
         },
-        v1: {
+        [BucketVersioningKeyFormat.v0mig]: {
+            gt: `notes/summer${inc('/')}`,
+            lt: `notes/summer${inc('/')}`,
+        },
+        [BucketVersioningKeyFormat.v1]: {
             gt: `${DbPrefixes.Master}notes/summer${inc('/')}`,
             lt: `${DbPrefixes.Master}notes/summer${inc('/')}`,
         },

@@ -237,11 +285,15 @@ const tests = [
         delimiter: '/',
         prefix: 'notes/',
     }, {
-        v0: {
+        [BucketVersioningKeyFormat.v0]: {
             gte: 'notes/',
             lt: `notes${inc('/')}`,
         },
-        v1: {
+        [BucketVersioningKeyFormat.v0mig]: {
+            gte: 'notes/',
+            lt: `notes${inc('/')}`,
+        },
+        [BucketVersioningKeyFormat.v1]: {
             gte: `${DbPrefixes.Master}notes/`,
             lt: `${DbPrefixes.Master}notes${inc('/')}`,
         },

@@ -264,11 +316,15 @@ const tests = [
         prefix: 'notes/',
         marker: 'notes/year.txt',
     }, {
-        v0: {
+        [BucketVersioningKeyFormat.v0]: {
             gt: 'notes/year.txt',
             lt: `notes${inc('/')}`,
         },
-        v1: {
+        [BucketVersioningKeyFormat.v0mig]: {
+            gt: 'notes/year.txt',
+            lt: `notes${inc('/')}`,
+        },
+        [BucketVersioningKeyFormat.v1]: {
             gt: `${DbPrefixes.Master}notes/year.txt`,
             lt: `${DbPrefixes.Master}notes${inc('/')}`,
         },

@@ -289,11 +345,15 @@ const tests = [
         marker: 'notes/',
         maxKeys: 1,
     }, {
-        v0: {
+        [BucketVersioningKeyFormat.v0]: {
             gt: 'notes/',
             lt: `notes${inc('/')}`,
         },
-        v1: {
+        [BucketVersioningKeyFormat.v0mig]: {
+            gt: 'notes/',
+            lt: `notes${inc('/')}`,
+        },
+        [BucketVersioningKeyFormat.v1]: {
             gt: `${DbPrefixes.Master}notes/`,
             lt: `${DbPrefixes.Master}notes${inc('/')}`,
         },

@@ -311,11 +371,15 @@ const tests = [
         marker: 'notes/spring/',
         maxKeys: 1,
     }, {
-        v0: {
+        [BucketVersioningKeyFormat.v0]: {
             gt: 'notes/spring/',
             lt: `notes${inc('/')}`,
         },
-        v1: {
+        [BucketVersioningKeyFormat.v0mig]: {
+            gt: 'notes/spring/',
+            lt: `notes${inc('/')}`,
+        },
+        [BucketVersioningKeyFormat.v1]: {
             gt: `${DbPrefixes.Master}notes/spring/`,
             lt: `${DbPrefixes.Master}notes${inc('/')}`,
         },

@@ -333,11 +397,15 @@ const tests = [
         marker: 'notes/summer/',
         maxKeys: 1,
     }, {
-        v0: {
+        [BucketVersioningKeyFormat.v0]: {
             gt: 'notes/summer/',
             lt: `notes${inc('/')}`,
         },
-        v1: {
+        [BucketVersioningKeyFormat.v0mig]: {
+            gt: 'notes/summer/',
+            lt: `notes${inc('/')}`,
+        },
+        [BucketVersioningKeyFormat.v1]: {
             gt: `${DbPrefixes.Master}notes/summer/`,
             lt: `${DbPrefixes.Master}notes${inc('/')}`,
         },

@@ -357,11 +425,15 @@ const tests = [
         marker: 'notes/year.txt',
         maxKeys: 1,
     }, {
-        v0: {
+        [BucketVersioningKeyFormat.v0]: {
             gt: 'notes/year.txt',
             lt: `notes${inc('/')}`,
         },
-        v1: {
+        [BucketVersioningKeyFormat.v0mig]: {
+            gt: 'notes/year.txt',
+            lt: `notes${inc('/')}`,
+        },
+        [BucketVersioningKeyFormat.v1]: {
             gt: `${DbPrefixes.Master}notes/year.txt`,
             lt: `${DbPrefixes.Master}notes${inc('/')}`,
         },

@@ -381,11 +453,15 @@ const tests = [
         marker: 'notes/yore.rs',
         maxKeys: 1,
     }, {
-        v0: {
+        [BucketVersioningKeyFormat.v0]: {
             gt: 'notes/yore.rs',
             lt: `notes${inc('/')}`,
         },
-        v1: {
+        [BucketVersioningKeyFormat.v0mig]: {
+            gt: 'notes/yore.rs',
+            lt: `notes${inc('/')}`,
+        },
+        [BucketVersioningKeyFormat.v1]: {
             gt: `${DbPrefixes.Master}notes/yore.rs`,
             lt: `${DbPrefixes.Master}notes${inc('/')}`,
         },
@@ -400,8 +476,14 @@ const tests = [
     new Test('all elements v2', {
         v2: true,
     }, {
-        v0: {},
-        v1: {
+        [BucketVersioningKeyFormat.v0]: {},
+        [BucketVersioningKeyFormat.v0mig]: [{
+            lt: DbPrefixes.V1,
+        }, {
+            gte: inc(DbPrefixes.V1),
+            serial: true,
+        }],
+        [BucketVersioningKeyFormat.v1]: {
             gte: DbPrefixes.Master,
             lt: inc(DbPrefixes.Master),
         },

@@ -416,10 +498,17 @@ const tests = [
         startAfter: receivedData[4].key,
         v2: true,
     }, {
-        v0: {
+        [BucketVersioningKeyFormat.v0]: {
             gt: receivedData[4].key,
         },
-        v1: {
+        [BucketVersioningKeyFormat.v0mig]: [{
+            gt: receivedData[4].key,
+            lt: DbPrefixes.V1,
+        }, {
+            gte: inc(DbPrefixes.V1),
+            serial: true,
+        }],
+        [BucketVersioningKeyFormat.v1]: {
             gt: `${DbPrefixes.Master}${receivedData[4].key}`,
             lt: inc(DbPrefixes.Master),
         },

@@ -441,10 +530,17 @@ const tests = [
         delimiter: '/',
         v2: true,
     }, {
-        v0: {
+        [BucketVersioningKeyFormat.v0]: {
             gt: 'zzzz',
         },
-        v1: {
+        [BucketVersioningKeyFormat.v0mig]: [{
+            gt: 'zzzz',
+            lt: DbPrefixes.V1,
+        }, {
+            gte: inc(DbPrefixes.V1),
+            serial: true,
+        }],
+        [BucketVersioningKeyFormat.v1]: {
             gt: `${DbPrefixes.Master}zzzz`,
             lt: inc(DbPrefixes.Master),
         },

@@ -459,10 +555,17 @@ const tests = [
         continuationToken: receivedData[4].key,
         v2: true,
     }, {
-        v0: {
+        [BucketVersioningKeyFormat.v0]: {
             gt: receivedData[4].key,
         },
-        v1: {
+        [BucketVersioningKeyFormat.v0mig]: [{
+            gt: receivedData[4].key,
+            lt: DbPrefixes.V1,
+        }, {
+            gte: inc(DbPrefixes.V1),
+            serial: true,
+        }],
+        [BucketVersioningKeyFormat.v1]: {
             gt: `${DbPrefixes.Master}${receivedData[4].key}`,
             lt: inc(DbPrefixes.Master),
         },

@@ -484,10 +587,17 @@ const tests = [
         delimiter: '/',
         v2: true,
     }, {
-        v0: {
+        [BucketVersioningKeyFormat.v0]: {
             gt: 'zzzz',
         },
-        v1: {
+        [BucketVersioningKeyFormat.v0mig]: [{
+            gt: 'zzzz',
+            lt: DbPrefixes.V1,
+        }, {
+            gte: inc(DbPrefixes.V1),
+            serial: true,
+        }],
+        [BucketVersioningKeyFormat.v1]: {
             gt: `${DbPrefixes.Master}zzzz`,
             lt: inc(DbPrefixes.Master),
         },

@@ -503,11 +613,15 @@ const tests = [
         prefix: 'notes/summer/',
         startAfter: 'notes/summer0',
     }, {
-        v0: {
+        [BucketVersioningKeyFormat.v0]: {
             gte: 'notes/summer/',
             lt: `notes/summer${inc('/')}`,
         },
-        v1: {
+        [BucketVersioningKeyFormat.v0mig]: {
+            gte: 'notes/summer/',
+            lt: `notes/summer${inc('/')}`,
+        },
+        [BucketVersioningKeyFormat.v1]: {
             gte: `${DbPrefixes.Master}notes/summer/`,
             lt: `${DbPrefixes.Master}notes/summer${inc('/')}`,
         },

@@ -523,11 +637,15 @@ const tests = [
         prefix: 'notes/summer/',
         continuationToken: 'notes/summer0',
     }, {
-        v0: {
+        [BucketVersioningKeyFormat.v0]: {
             gte: 'notes/summer/',
             lt: `notes/summer${inc('/')}`,
         },
-        v1: {
+        [BucketVersioningKeyFormat.v0mig]: {
+            gte: 'notes/summer/',
+            lt: `notes/summer${inc('/')}`,
+        },
+        [BucketVersioningKeyFormat.v1]: {
             gte: `${DbPrefixes.Master}notes/summer/`,
             lt: `${DbPrefixes.Master}notes/summer${inc('/')}`,
         },

@@ -544,10 +662,17 @@ const tests = [
         maxKeys: 1,
         v2: true,
     }, {
-        v0: {
+        [BucketVersioningKeyFormat.v0]: {
             gt: 'notes/year.txt',
         },
-        v1: {
+        [BucketVersioningKeyFormat.v0mig]: [{
+            gt: 'notes/year.txt',
+            lt: DbPrefixes.V1,
+        }, {
+            gte: inc(DbPrefixes.V1),
+            serial: true,
+        }],
+        [BucketVersioningKeyFormat.v1]: {
             gt: `${DbPrefixes.Master}notes/year.txt`,
             lt: inc(DbPrefixes.Master),
         },

@@ -568,11 +693,15 @@ const tests = [
         maxKeys: 1,
         v2: true,
     }, {
-        v0: {
+        [BucketVersioningKeyFormat.v0]: {
             gt: 'notes/',
             lt: `notes${inc('/')}`,
         },
-        v1: {
+        [BucketVersioningKeyFormat.v0mig]: {
+            gt: 'notes/',
+            lt: `notes${inc('/')}`,
+        },
+        [BucketVersioningKeyFormat.v1]: {
             gt: `${DbPrefixes.Master}notes/`,
             lt: `${DbPrefixes.Master}notes${inc('/')}`,
         },

@@ -591,11 +720,15 @@ const tests = [
         maxKeys: 1,
         v2: true,
     }, {
-        v0: {
+        [BucketVersioningKeyFormat.v0]: {
             gt: 'notes/spring/',
             lt: `notes${inc('/')}`,
         },
-        v1: {
+        [BucketVersioningKeyFormat.v0mig]: {
+            gt: 'notes/spring/',
+            lt: `notes${inc('/')}`,
+        },
+        [BucketVersioningKeyFormat.v1]: {
             gt: `${DbPrefixes.Master}notes/spring/`,
             lt: `${DbPrefixes.Master}notes${inc('/')}`,
         },

@@ -614,11 +747,15 @@ const tests = [
         maxKeys: 1,
         v2: true,
     }, {
-        v0: {
+        [BucketVersioningKeyFormat.v0]: {
             gt: 'notes/summer/',
             lt: `notes${inc('/')}`,
         },
-        v1: {
+        [BucketVersioningKeyFormat.v0mig]: {
+            gt: 'notes/summer/',
+            lt: `notes${inc('/')}`,
+        },
+        [BucketVersioningKeyFormat.v1]: {
             gt: `${DbPrefixes.Master}notes/summer/`,
             lt: `${DbPrefixes.Master}notes${inc('/')}`,
         },

@@ -639,11 +776,15 @@ const tests = [
         maxKeys: 1,
         v2: true,
     }, {
-        v0: {
+        [BucketVersioningKeyFormat.v0]: {
             gt: 'notes/year.txt',
             lt: `notes${inc('/')}`,
         },
-        v1: {
+        [BucketVersioningKeyFormat.v0mig]: {
+            gt: 'notes/year.txt',
+            lt: `notes${inc('/')}`,
+        },
+        [BucketVersioningKeyFormat.v1]: {
             gt: `${DbPrefixes.Master}notes/year.txt`,
             lt: `${DbPrefixes.Master}notes${inc('/')}`,
         },

@@ -664,11 +805,15 @@ const tests = [
         maxKeys: 1,
         v2: true,
     }, {
-        v0: {
+        [BucketVersioningKeyFormat.v0]: {
             gt: 'notes/yore.rs',
             lt: `notes${inc('/')}`,
         },
-        v1: {
+        [BucketVersioningKeyFormat.v0mig]: {
+            gt: 'notes/yore.rs',
+            lt: `notes${inc('/')}`,
+        },
+        [BucketVersioningKeyFormat.v1]: {
             gt: `${DbPrefixes.Master}notes/yore.rs`,
             lt: `${DbPrefixes.Master}notes${inc('/')}`,
         },
@@ -680,6 +825,109 @@ const tests = [
         NextContinuationToken: undefined,
     }, (e, input) => e.key > input.startAfter),
+
+    new Test('with startAfter after vformat V1 prefix', {
+        delimiter: '/',
+        startAfter: 'éléphant pâle',
+        maxKeys: 3,
+        v2: true,
+    }, {
+        [BucketVersioningKeyFormat.v0]: {
+            gt: 'éléphant pâle',
+        },
+        [BucketVersioningKeyFormat.v0mig]: {
+            gt: 'éléphant pâle',
+        },
+        [BucketVersioningKeyFormat.v1]: {
+            gt: `${DbPrefixes.Master}éléphant pâle`,
+            lt: inc(DbPrefixes.Master),
+        },
+    }, {
+        Contents: [],
+        CommonPrefixes: [],
+        Delimiter: '/',
+        IsTruncated: false,
+        NextContinuationToken: undefined,
+    }, (e, input) => e.key > input.startAfter),
+
+    new Test('with startAfter inside vformat V1 prefix', {
+        delimiter: '/',
+        startAfter: `${DbPrefixes.V1}foo`,
+        maxKeys: 3,
+        v2: true,
+    }, {
+        [BucketVersioningKeyFormat.v0]: {
+            gt: `${DbPrefixes.V1}foo`,
+        },
+        // v0mig skips all V1-prefixed keys to start at the beginning
+        // of the second v0 range (to skip V1 keys being migrated)
+        [BucketVersioningKeyFormat.v0mig]: {
+            gt: inc(DbPrefixes.V1),
+        },
+        [BucketVersioningKeyFormat.v1]: {
+            gt: `${DbPrefixes.Master}${DbPrefixes.V1}foo`,
+            lt: inc(DbPrefixes.Master),
+        },
+    }, {
+        Contents: [],
+        CommonPrefixes: [],
+        Delimiter: '/',
+        IsTruncated: false,
+        NextContinuationToken: undefined,
+    }, (e, input) => e.key > input.startAfter),
+
+    new Test('with prefix after vformat V1 prefix', {
+        delimiter: '/',
+        prefix: 'éléphant pâle/',
+        maxKeys: 3,
+        v2: true,
+    }, {
+        [BucketVersioningKeyFormat.v0]: {
+            gte: 'éléphant pâle/',
+            lt: 'éléphant pâle0',
+        },
+        [BucketVersioningKeyFormat.v0mig]: {
+            gte: 'éléphant pâle/',
+            lt: 'éléphant pâle0',
+        },
+        [BucketVersioningKeyFormat.v1]: {
+            gte: `${DbPrefixes.Master}éléphant pâle/`,
+            lt: `${DbPrefixes.Master}éléphant pâle0`,
+        },
+    }, {
+        Contents: [],
+        CommonPrefixes: [],
+        Delimiter: '/',
+        IsTruncated: false,
+        NextContinuationToken: undefined,
+    }, (e, input) => e.key > input.startAfter),
+
+    new Test('with prefix inside vformat V1 prefix', {
+        delimiter: '/',
+        prefix: `${DbPrefixes.V1}foo/`,
+        maxKeys: 3,
+        v2: true,
+    }, {
+        [BucketVersioningKeyFormat.v0]: {
+            gte: `${DbPrefixes.V1}foo/`,
+            lt: `${DbPrefixes.V1}foo0`,
+        },
+        // v0mig skips the V1 prefix altogether to avoid returning V1
+        // keys being migrated. It uses a trick: passing "lt: ''"
+        // (empty string) forces to list an empty range
+        [BucketVersioningKeyFormat.v0mig]: {
+            lt: '',
+        },
+        [BucketVersioningKeyFormat.v1]: {
+            gte: `${DbPrefixes.Master}${DbPrefixes.V1}foo/`,
+            lt: `${DbPrefixes.Master}${DbPrefixes.V1}foo0`,
+        },
+    }, {
+        Contents: [],
+        CommonPrefixes: [],
+        Delimiter: '/',
+        IsTruncated: false,
+        NextContinuationToken: undefined,
+    }, (e, input) => e.key > input.startAfter),
 ];

 const alphabeticalOrderTests = [
@@ -708,10 +956,13 @@ function getTestListing(test, data, vFormat) {
     return data
         .filter(e => test.filter(e, test.input))
         .map(obj => {
-            if (vFormat === 'v0') {
+            if ([BucketVersioningKeyFormat.v0,
+                 BucketVersioningKeyFormat.v0mig].includes(vFormat)) {
                 return obj;
             }
-            if (vFormat === 'v1') {
+            if ([BucketVersioningKeyFormat.v0v1,
+                 BucketVersioningKeyFormat.v1mig,
+                 BucketVersioningKeyFormat.v1].includes(vFormat)) {
                 return {
                     key: `${DbPrefixes.Master}${obj.key}`,
                     value: obj.value,

@@ -721,18 +972,39 @@ function getTestListing(test, data, vFormat) {
         });
 }

-['v0', 'v1'].forEach(vFormat => {
+[
+    BucketVersioningKeyFormat.v0,
+    BucketVersioningKeyFormat.v0mig,
+    BucketVersioningKeyFormat.v0v1,
+    BucketVersioningKeyFormat.v1mig,
+    BucketVersioningKeyFormat.v1,
+].forEach(vFormat => {
     describe(`vFormat=${vFormat} Delimiter listing algorithm`, () => {
         it('Should return good skipping value for DelimiterMaster', () => {
             const delimiter = new DelimiterMaster({ delimiter: '/' });
             for (let i = 0; i < 100; i++) {
+                let key;
+                if ([BucketVersioningKeyFormat.v0v1,
+                     BucketVersioningKeyFormat.v1mig,
+                     BucketVersioningKeyFormat.v1].includes(vFormat)) {
+                    key = `${DbPrefixes.Master}foo/${zpad(i)}`;
+                } else {
+                    key = `foo/${zpad(i)}`;
+                }
                 delimiter.filter({
-                    key: `${vFormat === 'v1' ? DbPrefixes.Master : ''}foo/${zpad(i)}`,
+                    key,
                     value: '{}',
                 });
             }
-            assert.strictEqual(delimiter.skipping(),
-                `${vFormat === 'v1' ? DbPrefixes.Master : ''}foo/`);
+            let skipping;
+            if ([BucketVersioningKeyFormat.v0v1,
+                 BucketVersioningKeyFormat.v1mig,
+                 BucketVersioningKeyFormat.v1].includes(vFormat)) {
+                skipping = `${DbPrefixes.Master}foo/`;
+            } else {
+                skipping = 'foo/';
+            }
+            assert.strictEqual(delimiter.skipping(), skipping);
         });

         it('Should set Delimiter alphabeticalOrder field to the expected value', () => {

@@ -748,7 +1020,17 @@ function getTestListing(test, data, vFormat) {
         it(`Should return metadata listing params to list ${test.name}`, () => {
             const listing = new Delimiter(test.input, logger, vFormat);
             const params = listing.genMDParams();
-            assert.deepStrictEqual(params, test.genMDParams[vFormat]);
+            let paramsVFormat;
+            if ([BucketVersioningKeyFormat.v0v1,
+                 BucketVersioningKeyFormat.v1mig,
+                 BucketVersioningKeyFormat.v1].includes(vFormat)) {
+                // all above vformats are equivalent to v1 when it
+                // comes to generating md params
+                paramsVFormat = BucketVersioningKeyFormat.v1;
+            } else {
+                paramsVFormat = vFormat;
+            }
+            assert.deepStrictEqual(params, test.genMDParams[paramsVFormat]);
         });
         it(`Should list ${test.name}`, () => {
             // Simulate skip scan done by LevelDB

@@ -759,7 +1041,7 @@ function getTestListing(test, data, vFormat) {
         });

         // Only v0 gets a listing of master and version keys together.
-        if (vFormat === 'v0') {
+        if (vFormat === BucketVersioningKeyFormat.v0) {
             tests.forEach(test => {
                 it(`Should list master versions ${test.name}`, () => {
                     // Simulate skip scan done by LevelDB
@@ -13,7 +13,7 @@ const VSConst =
     require('../../../../lib/versioning/constants').VersioningConstants;
 const Version = require('../../../../lib/versioning/Version').Version;
 const { generateVersionId } = require('../../../../lib/versioning/VersionID');
-const { DbPrefixes } = VSConst;
+const { DbPrefixes, BucketVersioningKeyFormat } = VSConst;


 const VID_SEP = VSConst.VersionId.Separator;

@@ -35,16 +35,25 @@ const fakeLogger = {
 };

 function getListingKey(key, vFormat) {
-    if (vFormat === 'v0') {
+    if ([BucketVersioningKeyFormat.v0,
+         BucketVersioningKeyFormat.v0mig].includes(vFormat)) {
         return key;
     }
-    if (vFormat === 'v1') {
+    if ([BucketVersioningKeyFormat.v0v1,
+         BucketVersioningKeyFormat.v1mig,
+         BucketVersioningKeyFormat.v1].includes(vFormat)) {
         return `${DbPrefixes.Master}${key}`;
     }
     return assert.fail(`bad vFormat ${vFormat}`);
 }

-['v0', 'v1'].forEach(vFormat => {
+[
+    BucketVersioningKeyFormat.v0,
+    BucketVersioningKeyFormat.v0mig,
+    BucketVersioningKeyFormat.v0v1,
+    BucketVersioningKeyFormat.v1mig,
+    BucketVersioningKeyFormat.v1,
+].forEach(vFormat => {
     describe(`Delimiter All masters listing algorithm vFormat=${vFormat}`, () => {
         it('should return SKIP_NONE for DelimiterMaster when both NextMarker ' +
         'and NextContinuationToken are undefined', () => {

@@ -102,7 +111,9 @@ function getListingKey(key, vFormat) {
             /* When a delimiter is set and the NextMarker ends with the
              * delimiter it should return the next marker value. */
             assert.strictEqual(delimiter.NextMarker, keyWithEndingDelimiter);
-            const skipKey = vFormat === 'v1' ?
+            const skipKey = [BucketVersioningKeyFormat.v0v1,
+                BucketVersioningKeyFormat.v1mig,
+                BucketVersioningKeyFormat.v1].includes(vFormat) ?
                 `${DbPrefixes.Master}${keyWithEndingDelimiter}` :
                 keyWithEndingDelimiter;
             assert.strictEqual(delimiter.skipping(), skipKey);

@@ -135,7 +146,8 @@ function getListingKey(key, vFormat) {

             const listingKey = getListingKey(key, vFormat);
             assert.strictEqual(delimiter.filter({ key: listingKey, value }), FILTER_ACCEPT);
-            if (vFormat === 'v0') {
+            if ([BucketVersioningKeyFormat.v0,
+                 BucketVersioningKeyFormat.v0mig].includes(vFormat)) {
                 assert.strictEqual(delimiter.prvKey, key);
             }
             assert.strictEqual(delimiter.NextMarker, key);

@@ -206,7 +218,8 @@ function getListingKey(key, vFormat) {
             });
         });

-        if (vFormat === 'v0') {
+        if ([BucketVersioningKeyFormat.v0,
+             BucketVersioningKeyFormat.v0mig].includes(vFormat)) {
             it('should accept a PHD version as first input', () => {
                 const delimiter = new DelimiterMaster({}, fakeLogger, vFormat);
                 const keyPHD = 'keyPHD';
@@ -9,7 +9,7 @@ const performListing = require('../../../utils/performListing');
 const zpad = require('../../helpers').zpad;
 const { inc } = require('../../../../lib/algos/list/tools');
 const VSConst = require('../../../../lib/versioning/constants').VersioningConstants;
-const { DbPrefixes } = VSConst;
+const { DbPrefixes, BucketVersioningKeyFormat } = VSConst;
 const VID_SEP = VSConst.VersionId.Separator;
 
 class Test {

@@ -32,8 +32,8 @@ const bar = '{"versionId":"bar"}';
 const qux = '{"versionId":"qux"}';
 const valuePHD = '{"isPHD":"true","versionId":"1234567890abcdefg"}';
 const valueDeleteMarker = '{"hello":"world","isDeleteMarker":"true"}';
-const dataVersioned = {
-    v0: [
+const rawListingData = [
     { key: 'Pâtisserie=中文-español-English', value: bar },
     { key: `Pâtisserie=中文-español-English${VID_SEP}bar`, value: bar },
     { key: `Pâtisserie=中文-español-English${VID_SEP}foo`, value: foo },

@@ -66,41 +66,8 @@ const dataVersioned = {
     { key: 'notes/year.txt', value },
     { key: 'notes/yore.rs', value },
     { key: 'notes/zaphod/Beeblebrox.txt', value },
-    ],
+];
-    v1: [ // we add M and V prefixes in getTestListing() due to the
-          // test cases needing the original key to filter
-        { key: 'Pâtisserie=中文-español-English', value: bar },
-        { key: `Pâtisserie=中文-español-English${VID_SEP}bar`, value: bar },
-        { key: `Pâtisserie=中文-español-English${VID_SEP}foo`, value: foo },
-        { key: 'notes/spring/1.txt', value: bar },
-        { key: `notes/spring/1.txt${VID_SEP}bar`, value: bar },
-        { key: `notes/spring/1.txt${VID_SEP}foo`, value: foo },
-        { key: `notes/spring/1.txt${VID_SEP}qux`, value: qux },
-        { key: `notes/spring/2.txt${VID_SEP}bar`, value: valueDeleteMarker },
-        { key: `notes/spring/2.txt${VID_SEP}foo`, value: foo },
-        { key: 'notes/spring/march/1.txt',
-          value: '{"versionId":"null","isNull":true}' },
-        { key: `notes/spring/march/1.txt${VID_SEP}bar`, value: bar },
-        { key: `notes/spring/march/1.txt${VID_SEP}foo`, value: foo },
-        { key: 'notes/summer/1.txt', value: bar },
-        { key: `notes/summer/1.txt${VID_SEP}bar`, value: bar },
-        { key: `notes/summer/1.txt${VID_SEP}foo`, value: foo },
-        { key: 'notes/summer/2.txt', value: bar },
-        { key: `notes/summer/2.txt${VID_SEP}bar`, value: bar },
-        { key: `notes/summer/4.txt${VID_SEP}bar`, value: valueDeleteMarker },
-        { key: `notes/summer/4.txt${VID_SEP}foo`, value: valueDeleteMarker },
-        { key: `notes/summer/4.txt${VID_SEP}qux`, value: valueDeleteMarker },
-        // Compared to v0, the two following keys are version keys
-        // that we give a version ID, because delete markers do not
-        // have a master key in v1.
-        { key: `notes/summer/444.txt${VID_SEP}null`, value: valueDeleteMarker },
-        { key: `notes/summer/44444.txt${VID_SEP}null`, value: valueDeleteMarker },
-        { key: 'notes/summer/august/1.txt', value },
-        { key: 'notes/year.txt', value },
-        { key: 'notes/yore.rs', value },
-        { key: 'notes/zaphod/Beeblebrox.txt', value },
-    ],
-};
 const receivedData = [
     { key: 'Pâtisserie=中文-español-English', value: bar, versionId: 'bar' },
     { key: 'Pâtisserie=中文-español-English', value: foo, versionId: 'foo' },

@@ -130,9 +97,20 @@ const receivedData = [
 ];
 const tests = [
     new Test('all versions', {}, {
-        v0: {},
-        v1: [{ gte: DbPrefixes.Master, lt: inc(DbPrefixes.Master) },
-             { gte: DbPrefixes.Version, lt: inc(DbPrefixes.Version) }],
+        [BucketVersioningKeyFormat.v0]: {},
+        [BucketVersioningKeyFormat.v0mig]: [{
+            lt: DbPrefixes.V1,
+        }, {
+            gte: inc(DbPrefixes.V1),
+            serial: true,
+        }],
+        [BucketVersioningKeyFormat.v1]: [{
+            gte: DbPrefixes.Master,
+            lt: inc(DbPrefixes.Master),
+        }, {
+            gte: DbPrefixes.Version,
+            lt: inc(DbPrefixes.Version),
+        }],
     }, {
         Versions: receivedData,
        CommonPrefixes: [],

@@ -144,10 +122,17 @@ const tests = [
     new Test('with valid key marker', {
         keyMarker: receivedData[3].key,
     }, {
-        v0: {
+        [BucketVersioningKeyFormat.v0]: {
             gt: `${receivedData[3].key}\u0001`,
         },
-        v1: [{
+        [BucketVersioningKeyFormat.v0mig]: [{
+            gt: `${receivedData[3].key}\u0001`,
+            lt: DbPrefixes.V1,
+        }, {
+            gte: inc(DbPrefixes.V1),
+            serial: true,
+        }],
+        [BucketVersioningKeyFormat.v1]: [{
             gt: `${DbPrefixes.Master}${receivedData[3].key}${inc(VID_SEP)}`,
             lt: inc(DbPrefixes.Master),
         }, {

@@ -166,10 +151,17 @@
         keyMarker: 'zzzz',
         delimiter: '/',
     }, {
-        v0: {
+        [BucketVersioningKeyFormat.v0]: {
             gt: `zzzz${inc(VID_SEP)}`,
         },
-        v1: [{
+        [BucketVersioningKeyFormat.v0mig]: [{
+            gt: `zzzz${inc(VID_SEP)}`,
+            lt: DbPrefixes.V1,
+        }, {
+            gte: inc(DbPrefixes.V1),
+            serial: true,
+        }],
+        [BucketVersioningKeyFormat.v1]: [{
             gt: `${DbPrefixes.Master}zzzz${inc(VID_SEP)}`,
             lt: inc(DbPrefixes.Master),
         }, {

@@ -187,8 +179,14 @@
     new Test('with maxKeys', {
         maxKeys: 3,
     }, {
-        v0: {},
-        v1: [{
+        [BucketVersioningKeyFormat.v0]: {},
+        [BucketVersioningKeyFormat.v0mig]: [{
+            lt: DbPrefixes.V1,
+        }, {
+            gte: inc(DbPrefixes.V1),
+            serial: true,
+        }],
+        [BucketVersioningKeyFormat.v1]: [{
             gte: DbPrefixes.Master,
             lt: inc(DbPrefixes.Master),
         }, {

@@ -206,8 +204,14 @@
     new Test('with big maxKeys', {
         maxKeys: 15000,
     }, {
-        v0: {},
-        v1: [{
+        [BucketVersioningKeyFormat.v0]: {},
+        [BucketVersioningKeyFormat.v0mig]: [{
+            lt: DbPrefixes.V1,
+        }, {
+            gte: inc(DbPrefixes.V1),
+            serial: true,
+        }],
+        [BucketVersioningKeyFormat.v1]: [{
             gte: DbPrefixes.Master,
             lt: inc(DbPrefixes.Master),
         }, {

@@ -225,8 +229,14 @@
     new Test('with delimiter', {
         delimiter: '/',
     }, {
-        v0: {},
-        v1: [{
+        [BucketVersioningKeyFormat.v0]: {},
+        [BucketVersioningKeyFormat.v0mig]: [{
+            lt: DbPrefixes.V1,
+        }, {
+            gte: inc(DbPrefixes.V1),
+            serial: true,
+        }],
+        [BucketVersioningKeyFormat.v1]: [{
             gte: DbPrefixes.Master,
             lt: inc(DbPrefixes.Master),
         }, {

@@ -247,8 +257,14 @@
     new Test('with long delimiter', {
         delimiter: 'notes/summer',
     }, {
-        v0: {},
-        v1: [{
+        [BucketVersioningKeyFormat.v0]: {},
+        [BucketVersioningKeyFormat.v0mig]: [{
+            lt: DbPrefixes.V1,
+        }, {
+            gte: inc(DbPrefixes.V1),
+            serial: true,
+        }],
+        [BucketVersioningKeyFormat.v1]: [{
             gte: DbPrefixes.Master,
             lt: inc(DbPrefixes.Master),
         }, {

@@ -269,11 +285,15 @@
         prefix: 'notes/summer/',
         keyMarker: 'notes/summer0',
     }, {
-        v0: {
+        [BucketVersioningKeyFormat.v0]: {
             gt: `notes/summer0${inc(VID_SEP)}`,
             lt: `notes/summer${inc('/')}`,
         },
-        v1: [{
+        [BucketVersioningKeyFormat.v0mig]: {
+            gt: `notes/summer0${inc(VID_SEP)}`,
+            lt: `notes/summer${inc('/')}`,
+        },
+        [BucketVersioningKeyFormat.v1]: [{
             gt: `${DbPrefixes.Master}notes/summer0${inc(VID_SEP)}`,
             lt: `${DbPrefixes.Master}notes/summer${inc('/')}`,
         }, {

@@ -292,11 +312,15 @@
         delimiter: '/',
         prefix: 'notes/',
     }, {
-        v0: {
+        [BucketVersioningKeyFormat.v0]: {
             gte: 'notes/',
             lt: `notes${inc('/')}`,
         },
-        v1: [{
+        [BucketVersioningKeyFormat.v0mig]: {
+            gte: 'notes/',
+            lt: `notes${inc('/')}`,
+        },
+        [BucketVersioningKeyFormat.v1]: [{
             gte: `${DbPrefixes.Master}notes/`,
             lt: `${DbPrefixes.Master}notes${inc('/')}`,
         }, {

@@ -323,11 +347,15 @@
         prefix: 'notes/',
         keyMarker: 'notes/year.txt',
     }, {
-        v0: {
+        [BucketVersioningKeyFormat.v0]: {
             gt: `notes/year.txt${inc(VID_SEP)}`,
             lt: `notes${inc('/')}`,
         },
-        v1: [{
+        [BucketVersioningKeyFormat.v0mig]: {
+            gt: `notes/year.txt${inc(VID_SEP)}`,
+            lt: `notes${inc('/')}`,
+        },
+        [BucketVersioningKeyFormat.v1]: [{
             gt: `${DbPrefixes.Master}notes/year.txt${inc(VID_SEP)}`,
             lt: `${DbPrefixes.Master}notes${inc('/')}`,
         }, {

@@ -352,11 +380,15 @@
         keyMarker: 'notes/',
         maxKeys: 1,
     }, {
-        v0: {
+        [BucketVersioningKeyFormat.v0]: {
             gt: `notes/${inc(VID_SEP)}`,
             lt: `notes${inc('/')}`,
         },
-        v1: [{
+        [BucketVersioningKeyFormat.v0mig]: {
+            gt: `notes/${inc(VID_SEP)}`,
+            lt: `notes${inc('/')}`,
+        },
+        [BucketVersioningKeyFormat.v1]: [{
             gt: `${DbPrefixes.Master}notes/${inc(VID_SEP)}`,
             lt: `${DbPrefixes.Master}notes${inc('/')}`,
         }, {

@@ -378,11 +410,15 @@
         keyMarker: 'notes/spring/',
         maxKeys: 1,
     }, {
-        v0: {
+        [BucketVersioningKeyFormat.v0]: {
             gt: `notes/spring/${inc(VID_SEP)}`,
             lt: `notes${inc('/')}`,
         },
-        v1: [{
+        [BucketVersioningKeyFormat.v0mig]: {
+            gt: `notes/spring/${inc(VID_SEP)}`,
+            lt: `notes${inc('/')}`,
+        },
+        [BucketVersioningKeyFormat.v1]: [{
             gt: `${DbPrefixes.Master}notes/spring/${inc(VID_SEP)}`,
             lt: `${DbPrefixes.Master}notes${inc('/')}`,
         }, {

@@ -404,11 +440,15 @@
         keyMarker: 'notes/summer/',
         maxKeys: 1,
     }, {
-        v0: {
+        [BucketVersioningKeyFormat.v0]: {
             gt: `notes/summer/${inc(VID_SEP)}`,
             lt: `notes${inc('/')}`,
         },
-        v1: [{
+        [BucketVersioningKeyFormat.v0mig]: {
+            gt: `notes/summer/${inc(VID_SEP)}`,
+            lt: `notes${inc('/')}`,
+        },
+        [BucketVersioningKeyFormat.v1]: [{
             gt: `${DbPrefixes.Master}notes/summer/${inc(VID_SEP)}`,
             lt: `${DbPrefixes.Master}notes${inc('/')}`,
         }, {

@@ -432,11 +472,15 @@
         keyMarker: 'notes/year.txt',
         maxKeys: 1,
     }, {
-        v0: {
+        [BucketVersioningKeyFormat.v0]: {
             gt: `notes/year.txt${inc(VID_SEP)}`,
             lt: `notes${inc('/')}`,
         },
-        v1: [{
+        [BucketVersioningKeyFormat.v0mig]: {
+            gt: `notes/year.txt${inc(VID_SEP)}`,
+            lt: `notes${inc('/')}`,
+        },
+        [BucketVersioningKeyFormat.v1]: [{
             gt: `${DbPrefixes.Master}notes/year.txt${inc(VID_SEP)}`,
             lt: `${DbPrefixes.Master}notes${inc('/')}`,
         }, {

@@ -460,11 +504,15 @@
         keyMarker: 'notes/yore.rs',
         maxKeys: 1,
     }, {
-        v0: {
+        [BucketVersioningKeyFormat.v0]: {
             gt: `notes/yore.rs${inc(VID_SEP)}`,
             lt: `notes${inc('/')}`,
         },
-        v1: [{
+        [BucketVersioningKeyFormat.v0mig]: {
+            gt: `notes/yore.rs${inc(VID_SEP)}`,
+            lt: `notes${inc('/')}`,
+        },
+        [BucketVersioningKeyFormat.v1]: [{
             gt: `${DbPrefixes.Master}notes/yore.rs${inc(VID_SEP)}`,
             lt: `${DbPrefixes.Master}notes${inc('/')}`,
         }, {

@@ -481,14 +529,20 @@
     }, (e, input) => e.key > input.keyMarker),
 ];
 
-function getTestListing(test, data, vFormat) {
-    return data
+function getTestListing(test, vFormat) {
+    return rawListingData
+        .filter(e => [BucketVersioningKeyFormat.v0,
+            BucketVersioningKeyFormat.v0mig].includes(vFormat)
+            || e.value !== valuePHD)
         .filter(e => test.filter(e, test.input))
         .map(e => {
-            if (vFormat === 'v0') {
+            if ([BucketVersioningKeyFormat.v0,
+                BucketVersioningKeyFormat.v0mig].includes(vFormat)) {
                 return e;
             }
-            if (vFormat === 'v1') {
+            if ([BucketVersioningKeyFormat.v0v1,
+                BucketVersioningKeyFormat.v1mig,
+                BucketVersioningKeyFormat.v1].includes(vFormat)) {
                 const keyPrefix = e.key.includes(VID_SEP) ?
                     DbPrefixes.Version : DbPrefixes.Master;
                 return {

@@ -500,29 +554,60 @@ function getTestListing(test, data, vFormat) {
         });
 }
 
-['v0', 'v1'].forEach(vFormat => {
+[
+    BucketVersioningKeyFormat.v0,
+    BucketVersioningKeyFormat.v0mig,
+    BucketVersioningKeyFormat.v0v1,
+    BucketVersioningKeyFormat.v1mig,
+    BucketVersioningKeyFormat.v1,
+].forEach(vFormat => {
     describe(`Delimiter All Versions listing algorithm vFormat=${vFormat}`, () => {
         it('Should return good skipping value for DelimiterVersions', () => {
             const delimiter = new DelimiterVersions({ delimiter: '/' });
             for (let i = 0; i < 100; i++) {
+                let key;
+                if ([BucketVersioningKeyFormat.v0v1,
+                    BucketVersioningKeyFormat.v1mig,
+                    BucketVersioningKeyFormat.v1].includes(vFormat)) {
+                    key = `${DbPrefixes.Master}foo/${zpad(i)}`;
+                } else {
+                    key = `foo/${zpad(i)}`;
+                }
                 delimiter.filter({
-                    key: `${vFormat === 'v1' ? DbPrefixes.Master : ''}foo/${zpad(i)}`,
+                    key,
                     value: '{}',
                 });
             }
-            assert.strictEqual(delimiter.skipping(),
-                `${vFormat === 'v1' ? DbPrefixes.Master : ''}foo/`);
+            let skipping;
+            if ([BucketVersioningKeyFormat.v0v1,
+                BucketVersioningKeyFormat.v1mig,
+                BucketVersioningKeyFormat.v1].includes(vFormat)) {
+                skipping = `${DbPrefixes.Master}foo/`;
+            } else {
+                skipping = 'foo/';
+            }
+            assert.strictEqual(delimiter.skipping(), skipping);
         });
 
         tests.forEach(test => {
             it(`Should return metadata listing params to list ${test.name}`, () => {
                 const listing = new DelimiterVersions(test.input, logger, vFormat);
                 const params = listing.genMDParams();
-                assert.deepStrictEqual(params, test.genMDParams[vFormat]);
+                let paramsVFormat;
+                if ([BucketVersioningKeyFormat.v0v1,
+                    BucketVersioningKeyFormat.v1mig,
+                    BucketVersioningKeyFormat.v1].includes(vFormat)) {
+                    // all above vformats are equivalent to v1 when it
+                    // comes to generating md params
+                    paramsVFormat = BucketVersioningKeyFormat.v1;
+                } else {
+                    paramsVFormat = vFormat;
+                }
+                assert.deepStrictEqual(params, test.genMDParams[paramsVFormat]);
             });
             it(`Should list ${test.name}`, () => {
                 // Simulate skip scan done by LevelDB
-                const d = getTestListing(test, dataVersioned[vFormat], vFormat);
+                const d = getTestListing(test, vFormat);
                 const res = performListing(d, DelimiterVersions, test.input, logger, vFormat);
                 assert.deepStrictEqual(res, test.output);
             });
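For the v0mig format, the expected genMDParams output above is an array of two ranges rather than a single one: everything below DbPrefixes.V1 first, then everything from inc(DbPrefixes.V1) onwards, with serial: true suggesting that the second range is only to be read once the first is exhausted. The SerialStream exported by this branch fits exactly that kind of concatenation. The snippet below is purely illustrative glue, not code from this branch: listRanges is a made-up helper name, and a LevelUP-style db.createReadStream() API is assumed.

    // Hypothetical consumer of a genMDParams() result that is either a
    // single range or an array of ranges flagged for serial listing.
    const SerialStream = require('arsenal/lib/algos/stream/SerialStream');

    function listRanges(db, params) {
        if (!Array.isArray(params)) {
            // single range: one read stream is enough
            return db.createReadStream(params);
        }
        // several ranges: read them one after the other, in order
        return params
            .map(range => db.createReadStream(range))
            .reduce((acc, s) => new SerialStream(acc, s));
    }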
@@ -2,10 +2,11 @@
 
 const assert = require('assert');
 
-const { checkLimit, inc, listingParamsMasterKeysV0ToV1 } =
+const { checkLimit, inc, listingParamsMasterKeysV0ToV1, listingParamsV0ToV0Mig } =
     require('../../../../lib/algos/list/tools');
 const VSConst = require('../../../../lib/versioning/constants').VersioningConstants;
 const { DbPrefixes } = VSConst;
+const VID_SEP = VSConst.VersionId.Separator;
 
 describe('checkLimit function', () => {
     const tests = [

@@ -102,3 +103,93 @@ describe('listingParamsMasterKeysV0ToV1', () => {
         });
     });
 });
+
+describe('listingParamsV0ToV0Mig', () => {
+    const testCases = [
+        {
+            v0params: {},
+            v0migparams: [{
+                lt: DbPrefixes.V1,
+            }, {
+                gte: inc(DbPrefixes.V1),
+                serial: true,
+            }],
+        }, {
+            v0params: {
+                gte: 'foo/bar',
+                lt: 'foo/bas',
+            },
+            v0migparams: {
+                gte: 'foo/bar',
+                lt: 'foo/bas',
+            },
+        }, {
+            v0params: {
+                gt: `foo/bar${inc(VID_SEP)}`,
+            },
+            v0migparams: [{
+                gt: `foo/bar${inc(VID_SEP)}`,
+                lt: DbPrefixes.V1,
+            }, {
+                gte: inc(DbPrefixes.V1),
+                serial: true,
+            }],
+        }, {
+            v0params: {
+                gt: `foo/bar${VID_SEP}versionId`,
+            },
+            v0migparams: [{
+                gt: `foo/bar${VID_SEP}versionId`,
+                lt: DbPrefixes.V1,
+            }, {
+                gte: inc(DbPrefixes.V1),
+                serial: true,
+            }],
+        }, {
+            v0params: {
+                gt: `foo/bar/baz${VID_SEP}versionId`,
+                lt: 'foo/bas',
+            },
+            v0migparams: {
+                gt: `foo/bar/baz${VID_SEP}versionId`,
+                lt: 'foo/bas',
+            },
+        }, {
+            v0params: {
+                gt: `éléphant rose${VID_SEP}versionId`,
+            },
+            v0migparams: {
+                gt: `éléphant rose${VID_SEP}versionId`,
+            },
+        }, {
+            v0params: {
+                gte: 'éléphant rose',
+                lt: 'éléphant rosf',
+            },
+            v0migparams: {
+                gte: 'éléphant rose',
+                lt: 'éléphant rosf',
+            },
+        }, {
+            v0params: {
+                gt: `${DbPrefixes.V1}foo`,
+            },
+            v0migparams: {
+                gt: inc(DbPrefixes.V1),
+            },
+        }, {
+            v0params: {
+                gte: `${DbPrefixes.V1}foo/`,
+                lt: `${DbPrefixes.V1}foo0`,
+            },
+            v0migparams: {
+                lt: '',
+            },
+        }];
+    testCases.forEach(({ v0params, v0migparams }) => {
+        it(`${JSON.stringify(v0params)} => ${JSON.stringify(v0migparams)}`, () => {
+            const converted = listingParamsV0ToV0Mig(v0params);
+            assert.deepStrictEqual(converted, v0migparams);
+        });
+    });
+});
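The new test cases pin down the intended behaviour of listingParamsV0ToV0Mig: a v0-style key range is passed through untouched when it lies entirely below or entirely above the reserved DbPrefixes.V1 key space, is clamped or emptied when it starts inside it, and is otherwise split into two sub-ranges, the second one flagged serial: true so it is listed after the first. The sketch below is only a rough model of that rule, written against the cases above; it is not the implementation in lib/algos/list/tools.js, the literal '\x7f' merely stands in for DbPrefixes.V1, and inc() mirrors the usual "bump the last character" helper.

    // Rough model of the splitting rule implied by the test cases above
    // (an assumption, NOT the actual lib/algos/list/tools.js code).
    const V1 = '\x7f'; // stand-in for DbPrefixes.V1
    const inc = str => (str
        ? str.slice(0, -1) + String.fromCharCode(str.charCodeAt(str.length - 1) + 1)
        : str);

    function splitV0RangeAroundV1(params) {
        const lower = params.gt !== undefined ? params.gt : (params.gte || '');
        const upper = params.lt;
        if (upper !== undefined && upper <= V1) {
            return params; // range ends before the reserved V1 key space
        }
        if (lower >= inc(V1)) {
            return params; // range starts after the reserved V1 key space
        }
        if (lower >= V1) {
            // range starts inside the reserved key space: clamp past it,
            // or collapse to an empty range (enough for the cases above)
            return upper === undefined ? { gt: inc(V1) } : { lt: '' };
        }
        // range straddles the reserved key space: split it in two,
        // the second part to be listed serially after the first
        return [
            Object.assign({}, params, { lt: V1 }),
            { gte: inc(V1), serial: true },
        ];
    }

    // e.g. splitV0RangeAroundV1({}) => [{ lt: '\x7f' }, { gte: '\x80', serial: true }]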
@@ -1,38 +1,6 @@
 const assert = require('assert');
-const stream = require('stream');
 
 const MergeStream = require('../../../../lib/algos/stream/MergeStream');
+const Streamify = require('./Streamify');
 
-class Streamify extends stream.Readable {
-    constructor(objectsToSend, errorAtEnd) {
-        super({ objectMode: true });
-        this._remaining = Array.from(objectsToSend);
-        this._remaining.reverse();
-        this._errorAtEnd = errorAtEnd || false;
-        this._ended = false;
-        this._destroyed = false;
-    }
-
-    _read() {
-        process.nextTick(() => {
-            while (this._remaining.length > 0) {
-                const item = this._remaining.pop();
-                if (!this.push(item)) {
-                    return undefined;
-                }
-            }
-            if (this._errorAtEnd) {
-                return this.emit('error', new Error('OOPS'));
-            }
-            this._ended = true;
-            return this.push(null);
-        });
-    }
-
-    _destroy(err, callback) {
-        this._destroyed = true;
-        callback();
-    }
-}
 
 function readAll(stream, usePauseResume, cb) {
     const result = [];

@@ -168,12 +136,16 @@ describe('MergeStream', () => {
                     `${testCasePretty(testCase, false)}` +
                     `${usePauseResume ? ' with pause/resume' : ''}` +
                     `${errorAtEnd ? ' with error' : ''}`;
+                const testDescRev =
+                    `${testCasePretty(testCase, true)}` +
+                    `${usePauseResume ? ' with pause/resume' : ''}` +
+                    `${errorAtEnd ? ' with error' : ''}`;
                 it(`should cover ${testDesc}`, done => {
                     testMergeStreamWithIntegers(
                         testCase.stream1, testCase.stream2,
                         usePauseResume, errorAtEnd, done);
                 });
-                it(`should cover ${testDesc}`, done => {
+                it(`should cover ${testDescRev}`, done => {
                     testMergeStreamWithIntegers(
                         testCase.stream2, testCase.stream1,
                         usePauseResume, errorAtEnd, done);
@@ -0,0 +1,153 @@
+const assert = require('assert');
+const SerialStream = require('../../../../lib/algos/stream/SerialStream');
+const Streamify = require('./Streamify');
+
+function readAll(stream, usePauseResume, cb) {
+    const result = [];
+    stream.on('data', item => {
+        result.push(item);
+        if (usePauseResume) {
+            stream.pause();
+            setTimeout(() => stream.resume(), 1);
+        }
+    });
+    stream.once('end', () => cb(null, result));
+    stream.once('error', err => cb(err));
+}
+
+function testSerialStreamWithIntegers(contents1, contents2,
+                                      usePauseResume, errorAtEnd, cb) {
+    const expectedItems = contents1.concat(contents2);
+    const serialStream = new SerialStream(
+        new Streamify(contents1, errorAtEnd)
+            .on('error', () => {}),
+        new Streamify(contents2)
+            .on('error', () => {}));
+    readAll(serialStream, usePauseResume, (err, readItems) => {
+        if (errorAtEnd) {
+            assert(err);
+        } else {
+            assert.ifError(err);
+            assert.deepStrictEqual(readItems, expectedItems);
+        }
+        cb();
+    });
+}
+
+function testCasePretty(testCase, reversed) {
+    const desc1 = JSON.stringify(
+        reversed ? testCase.stream2 : testCase.stream1);
+    const desc2 = JSON.stringify(
+        reversed ? testCase.stream1 : testCase.stream2);
+    return `${desc1} concatenated with ${desc2}`;
+}
+
+describe('SerialStream', () => {
+    [
+        {
+            stream1: [],
+            stream2: [],
+        },
+        {
+            stream1: [0],
+            stream2: [],
+        },
+        {
+            stream1: [0, 1, 2, 3, 4],
+            stream2: [],
+        },
+        {
+            stream1: [0],
+            stream2: [1],
+        },
+        {
+            stream1: [1, 2, 3, 4, 5],
+            stream2: [6],
+        },
+        {
+            stream1: [1, 2, 3, 4, 5],
+            stream2: [6, 7, 8, 9, 10],
+        },
+    ].forEach(testCase => {
+        [false, true].forEach(usePauseResume => {
+            [false, true].forEach(errorAtEnd => {
+                const testDesc =
+                    `${testCasePretty(testCase, false)}` +
+                    `${usePauseResume ? ' with pause/resume' : ''}` +
+                    `${errorAtEnd ? ' with error' : ''}`;
+                const testDescRev =
+                    `${testCasePretty(testCase, true)}` +
+                    `${usePauseResume ? ' with pause/resume' : ''}` +
+                    `${errorAtEnd ? ' with error' : ''}`;
+                it(`should cover ${testDesc}`, done => {
+                    testSerialStreamWithIntegers(
+                        testCase.stream1, testCase.stream2,
+                        usePauseResume, errorAtEnd, done);
+                });
+                it(`should cover ${testDescRev}`, done => {
+                    testSerialStreamWithIntegers(
+                        testCase.stream2, testCase.stream1,
+                        usePauseResume, errorAtEnd, done);
+                });
+            });
+        });
+    });
+    [100, 1000, 10000, 100000].forEach(nbEntries => {
+        [false, true].forEach(usePauseResume => {
+            [false, true].forEach(errorAtEnd => {
+                if ((!usePauseResume && !errorAtEnd) || nbEntries <= 1000) {
+                    const fixtureDesc =
+                        `${usePauseResume ? ' with pause/resume' : ''}` +
+                        `${errorAtEnd ? ' with error' : ''}`;
+                    it(`${nbEntries} entries${fixtureDesc}`,
+                        function bigConcatSequential(done) {
+                            this.timeout(10000);
+                            const stream1 = [];
+                            const stream2 = [];
+                            for (let i = 0; i < nbEntries / 2; ++i) {
+                                stream1.push(i);
+                            }
+                            for (let i = nbEntries / 2; i < nbEntries; ++i) {
+                                stream2.push(i);
+                            }
+                            testSerialStreamWithIntegers(
+                                stream1, stream2, usePauseResume, errorAtEnd, done);
+                        });
+                }
+            });
+        });
+    });
+    // with 3 items per input stream, we reach the end of stream even
+    // though destroy() has been called (due to buffering), while with
+    // 100 items input streams are aborted before emitting the 'end'
+    // event, so it's useful to test both cases
+    [3, 100].forEach(nbItemsPerStream => {
+        it(`destroy() should destroy both inner streams with ${nbItemsPerStream} items per stream`,
+            done => {
+                const stream1 = new Streamify(new Array(nbItemsPerStream).fill()
+                    .map((e, i) => i));
+                const stream2 = new Streamify(new Array(nbItemsPerStream).fill()
+                    .map((e, i) => nbItemsPerStream + i));
+                const serialStream = new SerialStream(stream1, stream2);
+                serialStream.on('data', item => {
+                    if (item === 5) {
+                        serialStream.destroy();
+                        const s1ended = stream1._ended;
+                        const s2ended = stream2._ended;
+                        setTimeout(() => {
+                            if (!s1ended) {
+                                assert(stream1._destroyed);
+                            }
+                            if (!s2ended) {
+                                assert(stream2._destroyed);
+                            }
+                            done();
+                        }, 10);
+                    }
+                });
+                serialStream.once('error', err => {
+                    assert.fail(`unexpected error: ${err.message}`);
+                });
+            });
+    });
+});
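Per the expectations above, SerialStream behaves as an object-mode Readable that yields every item of its first input stream, then every item of the second, forwards errors from either input, and destroys whichever inputs have not yet ended when it is destroyed itself. For comparison only, a minimal serial concatenation of two object-mode readables can be built from stock Node streams as below; this illustrates the ordering semantics, not the SerialStream implementation (which, given the destroy() test, has to manage its inner streams more carefully).

    const { PassThrough } = require('stream');

    // Minimal illustration with stock Node streams: pipe stream1 into the
    // output without ending it, then pipe stream2 once stream1 has ended.
    function concatSerially(stream1, stream2) {
        const out = new PassThrough({ objectMode: true });
        stream1.on('error', err => out.destroy(err));
        stream2.on('error', err => out.destroy(err));
        stream1.pipe(out, { end: false });
        stream1.once('end', () => stream2.pipe(out));
        return out;
    }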
@@ -0,0 +1,35 @@
+const stream = require('stream');
+
+class Streamify extends stream.Readable {
+    constructor(objectsToSend, errorAtEnd) {
+        super({ objectMode: true });
+        this._remaining = Array.from(objectsToSend);
+        this._remaining.reverse();
+        this._errorAtEnd = errorAtEnd || false;
+        this._ended = false;
+        this._destroyed = false;
+    }
+
+    _read() {
+        process.nextTick(() => {
+            while (this._remaining.length > 0) {
+                const item = this._remaining.pop();
+                if (!this.push(item)) {
+                    return undefined;
+                }
+            }
+            if (this._errorAtEnd) {
+                return this.emit('error', new Error('OOPS'));
+            }
+            this._ended = true;
+            return this.push(null);
+        });
+    }
+
+    _destroy(err, callback) {
+        this._destroyed = true;
+        callback();
+    }
+}
+
+module.exports = Streamify;
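Streamify, now shared by the MergeStream and SerialStream tests, wraps an array in an object-mode Readable that emits the items in order, optionally emits an 'error' event after the last item, and records _ended/_destroyed so tests can assert on them. A standalone usage sketch (with the same relative require path as in the test directory):

    const Streamify = require('./Streamify');

    // pass `true` as the second argument to emit an error after the last item
    const src = new Streamify(['a', 'b', 'c']);
    const seen = [];
    src.on('data', item => seen.push(item));
    src.once('end', () => console.log(seen)); // prints [ 'a', 'b', 'c' ]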