Compare commits
4 commits: developmen ... improvemen

Author | SHA1 | Date
---|---|---
Rahul Padigela | 8a57523f55 |
Rahul Padigela | 8c8d9e2051 |
Rahul Padigela | 7416c12702 |
Rahul Padigela | a8563379aa |
@@ -10,3 +10,5 @@ node_modules/
 *-linux
 *-macos

+# Coverage
+coverage/
@@ -0,0 +1,7 @@
+module.exports = {
+    testEnvironment: 'node',
+    moduleFileExtensions: ['js', 'jsx', 'json', 'node'],
+    setupFiles: [],
+    setupFilesAfterEnv: ['<rootDir>/jest.postsetup.js'],
+    testMatch: ['**/tests/**/*.js?(x)']
+};
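For orientation, here is a rough sketch of what this configuration selects (the file path and test body are hypothetical, not part of the change): any `.js`/`.jsx` file under a `tests/` directory matches `testMatch` and runs in a plain Node environment, with Jest's globals (`describe`, `test`, `expect`) available without imports.

```js
// Hypothetical file: tests/unit/example.js — matched by '**/tests/**/*.js?(x)'.
const assert = require('assert');

describe('example suite', () => {
    test('runs in the node test environment', () => {
        // testEnvironment: 'node' means there is no DOM, so `window` is undefined.
        assert.strictEqual(typeof window, 'undefined');
    });
});
```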
@@ -0,0 +1 @@
+jest.setTimeout(120000);
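This one-line file is presumably the `<rootDir>/jest.postsetup.js` referenced by `setupFilesAfterEnv` above. Modules listed there run after the test framework is installed, so the call raises Jest's default per-test timeout (5 seconds) to 120 seconds for every suite — the counterpart of the old Mocha `--timeout 120000` flag. Individual slow tests could still override it, as in this sketch (the helper name is a made-up assumption):

```js
// Jest's test() accepts an optional per-test timeout in milliseconds as a third argument.
test('very slow functional case', async () => {
    await runLongScenario(); // hypothetical helper, for illustration only
}, 240000);
```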
File diff suppressed because it is too large
@@ -41,9 +41,10 @@
 },
 "devDependencies": {
 "eslint": "2.13.1",
-"eslint-plugin-react": "^4.3.0",
 "eslint-config-airbnb": "6.2.0",
 "eslint-config-scality": "scality/Guidelines#71a059ad",
+"eslint-plugin-react": "^4.3.0",
+"jest": "^24.8.0",
 "lolex": "1.5.2",
 "mocha": "2.5.3",
 "temp": "0.8.3"
@@ -52,8 +53,8 @@
 "lint": "eslint $(git ls-files '*.js')",
 "lint_md": "mdlint $(git ls-files '*.md')",
 "lint_yml": "yamllint $(git ls-files '*.yml')",
-"test": "mocha --recursive --timeout 5500 tests/unit",
-"ft_test": "find tests/functional -name \"*.js\" | grep -v \"utils/\" | xargs mocha --timeout 120000"
+"test": "jest --projects jest.config.js --coverage --testPathPattern='tests/unit/[\\w/-]+\\.[tj]s' tests/unit",
+"ft_test": "jest tests/functional"
 },
 "private": true
 }
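The new `test` script selects unit-test files with `--testPathPattern`, which Jest treats as a regular expression matched against each test file's path, while `--coverage` writes reports to Jest's default `coverage/` directory — matching the new ignore entry in the first hunk above. A small sketch of how that path pattern behaves (the sample paths are illustrative, not taken from the repository):

```js
// The regex passed to --testPathPattern above, applied to a few example paths.
const pattern = /tests\/unit\/[\w/-]+\.[tj]s/;

console.log(pattern.test('tests/unit/auth/v2/checkAuth.js'));  // true: under tests/unit, .js file
console.log(pattern.test('tests/functional/clustering.js'));   // false: not under tests/unit
console.log(pattern.test('tests/unit/some.data.md'));          // false: extension is not .js/.ts
```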
@@ -35,19 +35,19 @@ describe('Clustering', () => {
 return done();
 });

-it('Should create and stop workers properly', done => {
+test('Should create and stop workers properly', done => {
 runTest('simple', done);
 });

-it('Should restart workers until clustering stopped', done => {
+test('Should restart workers until clustering stopped', done => {
 runTest('watchdog', done);
 });

-it('Should shutdown cluster if master killed', done => {
+test('Should shutdown cluster if master killed', done => {
 runTest('killed', done);
 });

-it('Should timeout shutdown of workers if not exiting properly', done => {
+test('Should timeout shutdown of workers if not exiting properly', done => {
 runTest('shutdownTimeout', done);
 });
 });
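The test-body changes in this and the remaining files follow a single pattern: Mocha-style `it(...)` declarations become Jest's `test(...)`. In Jest, `it` is an alias of `test`, so the rename is a consistency choice rather than a behavioural one; the sketch below (not repository code) shows the two equivalent forms:

```js
// `it` is an alias of `test` in Jest, so these two declarations run identically.
test('completes via the done callback', done => done());
it('completes via the done callback (alias form)', done => done());
```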
@@ -122,14 +122,14 @@ describe('Multipart Uploads listing algorithm', () => {
 done();
 });

-it('should perform a listing of all keys', done => {
+test('should perform a listing of all keys', done => {
 const listingResult = performListing(keys, MultipartUploads,
 listingParams, logger);
 assert.deepStrictEqual(listingResult, expectedResult);
 done();
 });

-it('should perform a listing with delimiter', done => {
+test('should perform a listing with delimiter', done => {
 const delimiter = '/';
 listingParams.delimiter = delimiter;
 // format result
@@ -146,7 +146,7 @@ describe('Multipart Uploads listing algorithm', () => {
 done();
 });

-it('should perform a listing with max keys', done => {
+test('should perform a listing with max keys', done => {
 listingParams.maxKeys = 3;
 // format result
 expectedResult.Uploads.pop();
@@ -33,15 +33,15 @@ describe('Basic listing algorithm', () => {
 new Test('without parameters', undefined, data.slice(0, 10000)),
 new Test('with bad parameters', 'lala', data.slice(0, 10000)),
 ];
-tests.forEach(test => {
-it(`Should list ${test.name}`, done => {
-const res = performListing(data, Basic, test.input, logger);
-assert.deepStrictEqual(res, test.output);
+tests.forEach(t => {
+test(`Should list ${t.name}`, done => {
+const res = performListing(data, Basic, t.input, logger);
+assert.deepStrictEqual(res, t.output);
 done();
 });
 });

-it('Should support entries with no key', () => {
+test('Should support entries with no key', () => {
 const res1 = performListing([{
 value: '{"data":"foo"}',
 }], Basic, { maxKeys: 1 }, logger);
@@ -60,7 +60,7 @@ describe('Basic listing algorithm', () => {
 }]);
 });

-it('Should support key-only listing', () => {
+test('Should support key-only listing', () => {
 const res = performListing(['key1', 'key2'],
 Basic, { maxKeys: 1 }, logger);
 assert.deepStrictEqual(res, ['key1']);
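Alongside the `it` to `test` rename, callback parameters named `test` in `tests.forEach(test => ...)` loops are renamed to `t`. This is not just cosmetic: once the suites call the global `test()` function, a parameter named `test` would shadow it inside the loop body. A minimal sketch of the issue (illustrative, not repository code):

```js
const cases = [{ name: 'first' }, { name: 'second' }];

cases.forEach(test => {
    // Here `test` is the array element, so calling test('...') inside this body
    // would throw "test is not a function" — hence the rename of the parameter.
});

cases.forEach(t => {
    test(`handles ${t.name}`, () => {
        // Jest's global test() is reachable again.
    });
});
```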
@@ -474,7 +474,7 @@ const alphabeticalOrderTests = [


 describe('Delimiter listing algorithm', () => {
-it('Should return good skipping value for DelimiterMaster', done => {
+test('Should return good skipping value for DelimiterMaster', done => {
 const delimiter = new DelimiterMaster({ delimiter: '/' });
 for (let i = 0; i < 100; i++) {
 delimiter.filter({ key: `foo/${zpad(i)}`, value: '{}' });
@@ -483,7 +483,8 @@ describe('Delimiter listing algorithm', () => {
 done();
 });

-it('Should set Delimiter alphabeticalOrder field to the expected value',
+test(
+'Should set Delimiter alphabeticalOrder field to the expected value',
 () => {
 alphabeticalOrderTests.forEach(test => {
 const delimiter = new Delimiter(test.params);
@@ -491,31 +492,31 @@ describe('Delimiter listing algorithm', () => {
 test.expectedValue,
 `${JSON.stringify(test.params)}`);
 });
-});
+}
+);

-tests.forEach(test => {
-it(`Should list ${test.name}`, done => {
+tests.forEach(t => {
+test(`Should list ${t.name}`, done => {
 // Simulate skip scan done by LevelDB
-const d = data.filter(e => test.filter(e, test.input));
-const res = performListing(d, Delimiter, test.input, logger);
-assert.deepStrictEqual(res, test.output);
+const d = data.filter(e => t.filter(e, t.input));
+const res = performListing(d, Delimiter, t.input, logger);
+assert.deepStrictEqual(res, t.output);
 done();
 });
 });

-tests.forEach(test => {
-it(`Should list master versions ${test.name}`, done => {
+tests.forEach(t => {
+test(`Should list master versions ${t.name}`, done => {
 // Simulate skip scan done by LevelDB
-const d = dataVersioned.filter(e => test.filter(e, test.input));
-const res = performListing(d, DelimiterMaster, test.input, logger);
-assert.deepStrictEqual(res, test.output);
+const d = dataVersioned.filter(e => t.filter(e, t.input));
+const res = performListing(d, DelimiterMaster, t.input, logger);
+assert.deepStrictEqual(res, t.output);
 done();
 });
 });

-it('Should filter values according to alphabeticalOrder parameter',
-() => {
-let test = new Test('alphabeticalOrder parameter set', {
+test('Should filter values according to alphabeticalOrder parameter', () => {
+let t = new Test('alphabeticalOrder parameter set', {
 delimiter: '/',
 alphabeticalOrder: true,
 }, {
@@ -527,11 +528,11 @@ describe('Delimiter listing algorithm', () => {
 IsTruncated: false,
 NextMarker: undefined,
 });
-let d = nonAlphabeticalData.filter(e => test.filter(e, test.input));
-let res = performListing(d, Delimiter, test.input, logger);
-assert.deepStrictEqual(res, test.output);
+let d = nonAlphabeticalData.filter(e => t.filter(e, t.input));
+let res = performListing(d, Delimiter, t.input, logger);
+assert.deepStrictEqual(res, t.output);

-test = new Test('alphabeticalOrder parameter set', {
+t = new Test('alphabeticalOrder parameter set', {
 delimiter: '/',
 alphabeticalOrder: false,
 }, {
@@ -544,8 +545,8 @@ describe('Delimiter listing algorithm', () => {
 IsTruncated: false,
 NextMarker: undefined,
 });
-d = nonAlphabeticalData.filter(e => test.filter(e, test.input));
-res = performListing(d, Delimiter, test.input, logger);
-assert.deepStrictEqual(res, test.output);
+d = nonAlphabeticalData.filter(e => t.filter(e, t.input));
+res = performListing(d, Delimiter, t.input, logger);
+assert.deepStrictEqual(res, t.output);
 });
 });
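A second recurring pattern appears from this file onward: `it` calls whose long titles were wrapped onto a second line become `test(` with the title string on its own line and the closing `)` on its own line. Since `test(` is two characters longer than `it(`, this reflow presumably keeps those lines within the project's lint line-length limit; the declared test behaves the same either way. An illustrative before/after sketch (hypothetical title, not repository code):

```js
// Before (Mocha style, title wrapped after the it( call):
it('a fairly long descriptive title that already sat near the line-length limit',
() => {
    // assertions
});

// After (Jest style, title on its own line so the longer `test(` call still fits):
test(
    'a fairly long descriptive title that already sat near the line-length limit',
    () => {
        // assertions
    }
);
```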
@@ -34,7 +34,7 @@ const fakeLogger = {
 };

 describe('Delimiter All masters listing algorithm', () => {
-it('should return SKIP_NONE for DelimiterMaster when both NextMarker ' +
+test('should return SKIP_NONE for DelimiterMaster when both NextMarker ' +
 'and NextContinuationToken are undefined', () => {
 const delimiter = new DelimiterMaster({ delimiter: '/' }, fakeLogger);

@@ -45,7 +45,7 @@ describe('Delimiter All masters listing algorithm', () => {
 assert.strictEqual(delimiter.skipping(), SKIP_NONE);
 });

-it('should return <key><VersionIdSeparator> for DelimiterMaster when ' +
+test('should return <key><VersionIdSeparator> for DelimiterMaster when ' +
 'NextMarker is set and there is a delimiter', () => {
 const key = 'key';
 const delimiter = new DelimiterMaster({ delimiter: '/', marker: key },
@@ -62,7 +62,7 @@ describe('Delimiter All masters listing algorithm', () => {
 assert.strictEqual(delimiter.skipping(), key + VID_SEP);
 });

-it('should return <key><VersionIdSeparator> for DelimiterMaster when ' +
+test('should return <key><VersionIdSeparator> for DelimiterMaster when ' +
 'NextContinuationToken is set and there is a delimiter', () => {
 const key = 'key';
 const delimiter = new DelimiterMaster(
@@ -76,7 +76,7 @@ describe('Delimiter All masters listing algorithm', () => {
 assert.strictEqual(delimiter.skipping(), key + VID_SEP);
 });

-it('should return NextMarker for DelimiterMaster when NextMarker is set' +
+test('should return NextMarker for DelimiterMaster when NextMarker is set' +
 ', there is a delimiter and the key ends with the delimiter', () => {
 const delimiterChar = '/';
 const keyWithEndingDelimiter = `key${delimiterChar}`;
@@ -91,7 +91,7 @@ describe('Delimiter All masters listing algorithm', () => {
 assert.strictEqual(delimiter.skipping(), keyWithEndingDelimiter);
 });

-it('should skip entries not starting with prefix', () => {
+test('should skip entries not starting with prefix', () => {
 const delimiter = new DelimiterMaster({ prefix: 'prefix' }, fakeLogger);

 assert.strictEqual(delimiter.filter({ key: 'wrong' }), FILTER_SKIP);
@@ -100,7 +100,7 @@ describe('Delimiter All masters listing algorithm', () => {
 assert.deepStrictEqual(delimiter.result(), EmptyResult);
 });

-it('should skip entries superior to next marker', () => {
+test('should skip entries superior to next marker', () => {
 const delimiter = new DelimiterMaster({ marker: 'b' }, fakeLogger);

 assert.strictEqual(delimiter.filter({ key: 'a' }), FILTER_SKIP);
@@ -109,7 +109,7 @@ describe('Delimiter All masters listing algorithm', () => {
 assert.deepStrictEqual(delimiter.result(), EmptyResult);
 });

-it('should accept a master version', () => {
+test('should accept a master version', () => {
 const delimiter = new DelimiterMaster({}, fakeLogger);
 const key = 'key';
 const value = '';
@@ -126,7 +126,7 @@ describe('Delimiter All masters listing algorithm', () => {
 });
 });

-it('should accept a PHD version as first input', () => {
+test('should accept a PHD version as first input', () => {
 const delimiter = new DelimiterMaster({}, fakeLogger);
 const keyPHD = 'keyPHD';
 const objPHD = {
@@ -143,7 +143,7 @@ describe('Delimiter All masters listing algorithm', () => {
 assert.deepStrictEqual(delimiter.result(), EmptyResult);
 });

-it('should accept a PHD version', () => {
+test('should accept a PHD version', () => {
 const delimiter = new DelimiterMaster({}, fakeLogger);
 const key = 'keyA';
 const value = '';
@@ -172,7 +172,7 @@ describe('Delimiter All masters listing algorithm', () => {
 });
 });

-it('should accept a version after a PHD', () => {
+test('should accept a version after a PHD', () => {
 const delimiter = new DelimiterMaster({}, fakeLogger);
 const masterKey = 'key';
 const keyVersion = `${masterKey}${VID_SEP}version`;
@@ -202,7 +202,7 @@ describe('Delimiter All masters listing algorithm', () => {
 });
 });

-it('should accept a delete marker', () => {
+test('should accept a delete marker', () => {
 const delimiter = new DelimiterMaster({}, fakeLogger);
 const version = new Version({ isDeleteMarker: true });
 const key = 'key';
@@ -219,7 +219,7 @@ describe('Delimiter All masters listing algorithm', () => {
 assert.deepStrictEqual(delimiter.result(), EmptyResult);
 });

-it('should skip version after a delete marker', () => {
+test('should skip version after a delete marker', () => {
 const delimiter = new DelimiterMaster({}, fakeLogger);
 const version = new Version({ isDeleteMarker: true });
 const key = 'key';
@@ -235,7 +235,7 @@ describe('Delimiter All masters listing algorithm', () => {
 assert.deepStrictEqual(delimiter.result(), EmptyResult);
 });

-it('should accept a new key after a delete marker', () => {
+test('should accept a new key after a delete marker', () => {
 const delimiter = new DelimiterMaster({}, fakeLogger);
 const version = new Version({ isDeleteMarker: true });
 const key1 = 'key1';
@@ -258,7 +258,7 @@ describe('Delimiter All masters listing algorithm', () => {
 });
 });

-it('should accept the master version and skip the other ones', () => {
+test('should accept the master version and skip the other ones', () => {
 const delimiter = new DelimiterMaster({}, fakeLogger);
 const masterKey = 'key';
 const masterValue = 'value';
@@ -286,7 +286,7 @@ describe('Delimiter All masters listing algorithm', () => {
 });
 });

-it('should return good listing result for version', () => {
+test('should return good listing result for version', () => {
 const delimiter = new DelimiterMaster({}, fakeLogger);
 const masterKey = 'key';
 const versionKey1 = `${masterKey}${VID_SEP}version1`;
@@ -320,7 +320,8 @@ describe('Delimiter All masters listing algorithm', () => {
 });
 });

-it('should return good values for entries with different common prefixes',
+test(
+'should return good values for entries with different common prefixes',
 () => {
 const delimiterChar = '/';
 const commonPrefix1 = `commonPrefix1${delimiterChar}`;
@@ -368,13 +369,14 @@ describe('Delimiter All masters listing algorithm', () => {
 NextMarker: undefined,
 Delimiter: delimiterChar,
 });
-});
+}
+);

 /* We test here the internal management of the prvKey field of the
 * DelimiterMaster class, in particular once it has been set to an entry
 * key before to finally skip this entry because of an already present
 * common prefix. */
-it('should accept a version after skipping an object because of its ' +
+test('should accept a version after skipping an object because of its ' +
 'commonPrefix', () => {
 const delimiterChar = '/';
 const commonPrefix1 = `commonPrefix1${delimiterChar}`;
@@ -409,7 +411,7 @@ describe('Delimiter All masters listing algorithm', () => {
 });
 });

-it('should skip a versioned entry when there is a delimiter and the key ' +
+test('should skip a versioned entry when there is a delimiter and the key ' +
 'starts with the NextMarker value', () => {
 const delimiterChar = '/';
 const commonPrefix = `commonPrefix${delimiterChar}`;
@@ -284,7 +284,7 @@ const tests = [
 ];

 describe('Delimiter All Versions listing algorithm', () => {
-it('Should return good skipping value for DelimiterVersions', done => {
+test('Should return good skipping value for DelimiterVersions', done => {
 const delimiter = new DelimiterVersions({ delimiter: '/' });
 for (let i = 0; i < 100; i++) {
 delimiter.filter({ key: `foo/${zpad(i)}`, value: '{}' });
@@ -293,13 +293,13 @@ describe('Delimiter All Versions listing algorithm', () => {
 done();
 });

-tests.forEach(test => {
-it(`Should list ${test.name}`, done => {
+tests.forEach(t => {
+test(`Should list ${t.name}`, done => {
 // Simulate skip scan done by LevelDB
-const d = dataVersioned.filter(e => test.filter(e, test.input));
+const d = dataVersioned.filter(e => t.filter(e, t.input));
 const res =
-performListing(d, DelimiterVersions, test.input, logger);
-assert.deepStrictEqual(res, test.output);
+performListing(d, DelimiterVersions, t.input, logger);
+assert.deepStrictEqual(res, t.output);
 done();
 });
 });
@@ -16,7 +16,7 @@ describe('checkLimit function', () => {
 { input: [0, 0], output: 0 },
 ];
 tests.forEach((test, index) => {
-it(`test${index}`, done => {
+test(`test${index}`, done => {
 const res = checkLimit(test.input[0], test.input[1]);
 assert.deepStrictEqual(res, test.output);
 done();
@@ -23,11 +23,11 @@ const infoFromVault = {
 const authInfo = new AuthInfo(infoFromVault);

 describe('AuthInfo class constructor', () => {
-it('should return an object', () => {
+test('should return an object', () => {
 assert.strictEqual(typeof authInfo, 'object');
 });

-it('should set properties', () => {
+test('should set properties', () => {
 assert.strictEqual(authInfo.arn, arn);
 assert.strictEqual(authInfo.canonicalID, canonicalID);
 assert.strictEqual(authInfo.shortid, shortid);
@@ -36,51 +36,51 @@ describe('AuthInfo class constructor', () => {
 assert.strictEqual(authInfo.IAMdisplayName, IAMdisplayName);
 });

-it('should have a working getArn() method', () => {
+test('should have a working getArn() method', () => {
 assert.strictEqual(authInfo.getArn(), arn);
 });

-it('should have a working getCanonicalID() method', () => {
+test('should have a working getCanonicalID() method', () => {
 assert.strictEqual(authInfo.getCanonicalID(), canonicalID);
 });

-it('should have a working getShortid() method', () => {
+test('should have a working getShortid() method', () => {
 assert.strictEqual(authInfo.getShortid(), shortid);
 });

-it('should have a working getEmail() method', () => {
+test('should have a working getEmail() method', () => {
 assert.strictEqual(authInfo.getEmail(), email);
 });

-it('should have a working getAccountDisplayName() method', () => {
+test('should have a working getAccountDisplayName() method', () => {
 assert.strictEqual(authInfo.getAccountDisplayName(),
 accountDisplayName);
 });

-it('should have a working getIAMdisplayName() method', () => {
+test('should have a working getIAMdisplayName() method', () => {
 assert.strictEqual(authInfo.getIAMdisplayName(), IAMdisplayName);
 });

-it('should have a working isRequesterAnIAMUser() method', () => {
+test('should have a working isRequesterAnIAMUser() method', () => {
 assert.strictEqual(authInfo.isRequesterAnIAMUser(), true);
 const accountUser = new AuthInfo({ canonicalID: 'account' });
 assert.strictEqual(accountUser.isRequesterAnIAMUser(), false);
 });

-it('should have a working isRequesterPublicUser() method', () => {
+test('should have a working isRequesterPublicUser() method', () => {
 assert.strictEqual(authInfo.isRequesterPublicUser(), false);
 const publicUser = new AuthInfo({ canonicalID: constants.publicId });
 assert.strictEqual(publicUser.isRequesterPublicUser(), true);
 });

-it('should have a working isRequesterAServiceAccount() method', () => {
+test('should have a working isRequesterAServiceAccount() method', () => {
 assert.strictEqual(authInfo.isRequesterAServiceAccount(), false);
 const serviceAccount = new AuthInfo({
 canonicalID: `${constants.zenkoServiceAccount}/clueso` });
 assert.strictEqual(serviceAccount.isRequesterAServiceAccount(), true);
 });

-it('should have a working isRequesterThisServiceAccount() method', () => {
+test('should have a working isRequesterThisServiceAccount() method', () => {
 const serviceAccount = new AuthInfo({
 canonicalID: `${constants.zenkoServiceAccount}/clueso` });
 assert.strictEqual(
@@ -78,20 +78,19 @@ describe('AuthLoader class', () => {
 ['accounts.0.canonicalID', 64],
 ['accounts.0.keys', 'not an Array'],
 ['accounts.0.keys', undefined],
-].forEach(test => {
-if (test[1] === undefined) {
+].forEach(t => {
+if (t[1] === undefined) {
 // Check a failure when deleting required fields
-it(`should fail when missing field ${test[0]}`, done => {
+test(`should fail when missing field ${t[0]}`, done => {
 should._exec = shouldFail;
-should.missingField(obj, test[0], done);
+should.missingField(obj, t[0], done);
 });
 } else {
 // Check a failure when the type of field is different than
 // expected
-it(`should fail when modified field ${test[0]} ${test[1]}`,
-done => {
+test(`should fail when modified field ${t[0]} ${t[1]}`, done => {
 should._exec = shouldFail;
-should.modifiedField(obj, test[0], test[1], done);
+should.modifiedField(obj, t[0], t[1], done);
 });
 }
 });
@@ -103,30 +102,30 @@ describe('AuthLoader class', () => {
 [
 'accounts.0.keys',
 'accounts.0.users',
-].forEach(test => {
+].forEach(t => {
 // Check a success when deleting optional fields
-it(`should return success when missing field ${test}`, done => {
+test(`should return success when missing field ${t}`, done => {
 should._exec = shouldSucceed;
-should.missingField(obj, test[0], done);
+should.missingField(obj, t[0], done);
 });
 });

-it('Should return error on two same canonicalID', done => {
+test('Should return error on two same canonicalID', done => {
 obj.accounts[0].canonicalID = obj.accounts[1].canonicalID;
 shouldFail(obj, done);
 });

-it('Should return error on two same emails', done => {
+test('Should return error on two same emails', done => {
 obj.accounts[0].email = obj.accounts[1].email;
 shouldFail(obj, done);
 });

-it('Should return error on two same arn', done => {
+test('Should return error on two same arn', done => {
 obj.accounts[0].arn = obj.accounts[1].arn;
 shouldFail(obj, done);
 });

-it('Should return error on two same access key', done => {
+test('Should return error on two same access key', done => {
 obj.accounts[0].keys[0].access = obj.accounts[1].keys[0].access;
 shouldFail(obj, done);
 });
@@ -17,7 +17,7 @@ const searchEmail2 = 'sampleaccount4@sampling.com';
 const expectCanId2 = 'newCanId';

 describe('S3 in_memory auth backend', () => {
-it('should find an account', done => {
+test('should find an account', done => {
 const backend = new Backend(JSON.parse(JSON.stringify(ref)));
 backend.getCanonicalIds([searchEmail], log, (err, res) => {
 assert.strictEqual(res.message.body[searchEmail],
@@ -26,7 +26,7 @@ describe('S3 in_memory auth backend', () => {
 });
 });

-it('should clear old account authdata on refresh', done => {
+test('should clear old account authdata on refresh', done => {
 const backend = new Backend(JSON.parse(JSON.stringify(ref)));
 backend.refreshAuthData(obj2);
 backend.getCanonicalIds([searchEmail], log, (err, res) => {
@@ -35,7 +35,7 @@ describe('S3 in_memory auth backend', () => {
 });
 });

-it('should add new account authdata on refresh', done => {
+test('should add new account authdata on refresh', done => {
 const backend = new Backend(JSON.parse(JSON.stringify(ref)));
 backend.refreshAuthData(obj2);
 backend.getCanonicalIds([searchEmail2], log, (err, res) => {
@@ -14,28 +14,28 @@ describe('S3 AuthData Indexer', () => {
 done();
 });

-it('Should return account from canonicalID', done => {
+test('Should return account from canonicalID', done => {
 const res = index.getEntityByCanId(obj.accounts[0].canonicalID);
 assert.strictEqual(typeof res, 'object');
 assert.strictEqual(res.arn, obj.accounts[0].arn);
 done();
 });

-it('Should return account from email', done => {
+test('Should return account from email', done => {
 const res = index.getEntityByEmail(obj.accounts[1].email);
 assert.strictEqual(typeof res, 'object');
 assert.strictEqual(res.canonicalID, obj.accounts[1].canonicalID);
 done();
 });

-it('Should return account from key', done => {
+test('Should return account from key', done => {
 const res = index.getEntityByKey(obj.accounts[0].keys[0].access);
 assert.strictEqual(typeof res, 'object');
 assert.strictEqual(res.arn, obj.accounts[0].arn);
 done();
 });

-it('should index account without keys', done => {
+test('should index account without keys', done => {
 should._exec = () => {
 index = new Indexer(obj);
 const res = index.getEntityByEmail(obj.accounts[0].email);
@@ -46,7 +46,7 @@ describe('S3 AuthData Indexer', () => {
 should.missingField(obj, 'accounts.0.keys');
 });

-it('should index account without users', done => {
+test('should index account without users', done => {
 should._exec = () => {
 index = new Indexer(obj);
 const res = index.getEntityByEmail(obj.accounts[0].email);
@@ -13,7 +13,8 @@ const gcpCanonicalizedResource = request =>
 getCanonicalizedResource(request, 'GCP');

 describe('canonicalization', () => {
-it('should construct a canonicalized header in the correct order for AWS',
+test(
+'should construct a canonicalized header in the correct order for AWS',
 () => {
 const headers = {
 'date': 'Mon, 21 Sep 2015 22:29:27 GMT',
@@ -34,9 +35,10 @@ describe('canonicalization', () => {
 'x-amz-meta-compress:0\n' +
 'x-amz-meta-meta:something very meta\n' +
 'x-amz-request-payer:requester\n');
-});
+}
+);

-it('should return an empty string as the canonicalized ' +
+test('should return an empty string as the canonicalized ' +
 'header if no amz headers', () => {
 const headers = {
 'date': 'Mon, 21 Sep 2015 22:29:27 GMT',
@@ -49,7 +51,7 @@ describe('canonicalization', () => {
 assert.strictEqual(canonicalizedHeader, '');
 });

-it('should construct a canonicalized resource for AWS', () => {
+test('should construct a canonicalized resource for AWS', () => {
 const request = {
 headers: { host: 'bucket.s3.amazonaws.com:80' },
 url: '/obj',
@@ -65,7 +67,7 @@ describe('canonicalization', () => {
 '/bucket/obj?requestPayment=yes,please');
 });

-it('should return the path as the canonicalized resource ' +
+test('should return the path as the canonicalized resource ' +
 'if no bucket name, overriding headers or delete query for AWS', () => {
 const request = {
 headers: { host: 's3.amazonaws.com:80' },
@@ -76,7 +78,7 @@ describe('canonicalization', () => {
 assert.strictEqual(canonicalizedResource, '/');
 });

-it('should sort the subresources (included query params) in ' +
+test('should sort the subresources (included query params) in ' +
 'lexicographical order for AWS', () => {
 const request = {
 headers: { host: 's3.amazonaws.com:80' },
@@ -91,7 +93,8 @@ describe('canonicalization', () => {
 '/?partNumber=5&uploadId=iamanuploadid');
 });

-it('should construct a canonicalized header in the correct order for GCP',
+test(
+'should construct a canonicalized header in the correct order for GCP',
 () => {
 const headers = {
 'date': 'Mon, 21 Sep 2015 22:29:27 GMT',
@@ -112,9 +115,10 @@ describe('canonicalization', () => {
 'x-goog-meta-compress:0\n' +
 'x-goog-meta-meta:something very meta\n' +
 'x-goog-request-payer:requester\n');
-});
+}
+);

-it('should return an empty string as the canonicalized ' +
+test('should return an empty string as the canonicalized ' +
 'header if no goog headers', () => {
 const headers = {
 'date': 'Mon, 21 Sep 2015 22:29:27 GMT',
@@ -127,7 +131,7 @@ describe('canonicalization', () => {
 assert.strictEqual(canonicalizedHeader, '');
 });

-it('should construct a canonicalized resource for GCP', () => {
+test('should construct a canonicalized resource for GCP', () => {
 const request = {
 headers: { host: 'bucket.storage.googapis.com:80' },
 url: '/obj',
@@ -143,7 +147,7 @@ describe('canonicalization', () => {
 '/bucket/obj?billing=yes,please');
 });

-it('should return the path as the canonicalized resource ' +
+test('should return the path as the canonicalized resource ' +
 'if no bucket name, overriding headers or delete query for GCP', () => {
 const request = {
 headers: { host: 'storage.googleapis.com:80' },
@@ -155,7 +159,7 @@ describe('canonicalization', () => {
 });


-it('should sort the subresources (included query params) in ' +
+test('should sort the subresources (included query params) in ' +
 'lexicographical order for GCP', () => {
 const request = {
 headers: { host: 'storage.googleapis.com:80' },
@@ -10,28 +10,28 @@ const errors = require('../../../../index').errors;
 const log = new DummyRequestLogger();

 describe('checkTimestamp for timecheck in header auth', () => {
-it('should return AccessDenied error if the date in the ' +
+test('should return AccessDenied error if the date in the ' +
 'header is before epochTime', () => {
 const timestamp = new Date('1950-01-01');
 const timeoutResult = checkRequestExpiry(timestamp, log);
 assert.deepStrictEqual(timeoutResult, errors.AccessDenied);
 });

-it('should return RequestTimeTooSkewed error if the date in the ' +
+test('should return RequestTimeTooSkewed error if the date in the ' +
 'header is more than 15 minutes old', () => {
 const timestamp = new Date(Date.now() - 16 * 60000);
 const timeoutResult = checkRequestExpiry(timestamp, log);
 assert.deepStrictEqual(timeoutResult, errors.RequestTimeTooSkewed);
 });

-it('should return RequestTimeTooSkewed error if the date in ' +
+test('should return RequestTimeTooSkewed error if the date in ' +
 'the header is more than 15 minutes in the future', () => {
 const timestamp = new Date(Date.now() + 16 * 60000);
 const timeoutResult = checkRequestExpiry(timestamp, log);
 assert.deepStrictEqual(timeoutResult, errors.RequestTimeTooSkewed);
 });

-it('should return no error if the date in the header is ' +
+test('should return no error if the date in the header is ' +
 'within 15 minutes of current time', () => {
 const timestamp = new Date();
 const timeoutResult = checkRequestExpiry(timestamp, log);
@@ -9,7 +9,7 @@ const DummyRequestLogger = require('../../helpers').DummyRequestLogger;
 const log = new DummyRequestLogger();

 describe('v2 constructStringToSign function', () => {
-it('should construct a stringToSign with query params treated ' +
+test('should construct a stringToSign with query params treated ' +
 'like headers (e.g. x-amz-acl) for AWS', () => {
 const request = {
 url: '/noderocks/cuteotter.jpeg?AWSAccessKeyId' +
@@ -45,7 +45,7 @@ describe('v2 constructStringToSign function', () => {
 assert.strictEqual(actualOutput, expectedOutput);
 });

-it('should construct a stringToSign with query params treated ' +
+test('should construct a stringToSign with query params treated ' +
 'like headers (e.g. x-goog-acl) for GCP', () => {
 const request = {
 url: '/noderocks/cuteotter.jpeg?AWSAccessKeyId' +
@@ -11,14 +11,14 @@ describe('v2: headerAuthCheck', () => {
 { token: undefined, error: false },
 { token: 'invalid-token', error: true },
 { token: 'a'.repeat(128), error: false },
-].forEach(test => it(`test with token(${test.token})`, () => {
+].forEach(t => test(`test with token(${t.token})`, () => {
 const request = {
 headers: {
-'x-amz-security-token': test.token,
+'x-amz-security-token': t.token,
 },
 };
 const res = headerAuthCheck(request, log, {});
-if (test.error) {
+if (t.error) {
 assert.notStrictEqual(res.err, undefined);
 assert.strictEqual(res.err.InvalidToken, true);
 } else {
@@ -13,7 +13,7 @@ const RequestContext =
 const logger = new DummyRequestLogger();

 describe('Public Access', () => {
-it('should grant access to a user that provides absolutely' +
+test('should grant access to a user that provides absolutely' +
 'no authentication information and should assign that user the ' +
 'All Users Group accessKey', done => {
 const request = {
@@ -38,7 +38,7 @@ describe('Public Access', () => {
 }, 's3', requestContext);
 });

-it('should not grant access to a request that contains ' +
+test('should not grant access to a request that contains ' +
 'an authorization header without proper credentials', done => {
 const request = {
 method: 'GET',
@@ -11,13 +11,13 @@ describe('v2: queryAuthCheck', () => {
 { token: undefined, error: false },
 { token: 'invalid-token', error: true },
 { token: 'a'.repeat(128), error: false },
-].forEach(test => it(`test with token(${test.token})`, () => {
+].forEach(t => test(`test with token(${t.token})`, () => {
 const request = { method: 'GET' };
 const data = {
-SecurityToken: test.token,
+SecurityToken: t.token,
 };
 const res = queryAuthCheck(request, log, data);
-if (test.error) {
+if (t.error) {
 assert.notStrictEqual(res.err, undefined);
 assert.strictEqual(res.err.InvalidToken, true);
 } else {
@@ -14,7 +14,7 @@ const gcpConstructStringToSign = (request, query, log) =>
 constructStringToSign(request, query, log, 'GCP');

 describe('checkAuth reconstruction of signature', () => {
-it('should reconstruct the signature for a ' +
+test('should reconstruct the signature for a ' +
 'GET request from s3-curl for AWS', () => {
 // Based on s3-curl run
 const request = {
@@ -35,7 +35,7 @@ describe('checkAuth reconstruction of signature', () => {
 assert.strictEqual(reconstructedSig, 'MJNF7AqNapSu32TlBOVkcAxj58c=');
 });

-it('should reconstruct the signature for a GET request from ' +
+test('should reconstruct the signature for a GET request from ' +
 'CyberDuck for AWS', () => {
 // Based on CyberDuck request
 const request = {
@@ -58,7 +58,7 @@ describe('checkAuth reconstruction of signature', () => {
 assert.strictEqual(reconstructedSig, 'V8g5UJUFmMzruMqUHVT6ZwvUw+M=');
 });

-it('should reconstruct the signature for a PUT request from ' +
+test('should reconstruct the signature for a PUT request from ' +
 's3cmd for AWS', () => {
 // Based on s3cmd run
 const request = {
@@ -86,7 +86,7 @@ describe('checkAuth reconstruction of signature', () => {
 assert.strictEqual(reconstructedSig, 'fWPcicKn7Fhzfje/0pRTifCxL44=');
 });

-it('should reconstruct the signature for a ' +
+test('should reconstruct the signature for a ' +
 'GET request from s3-curl for GCP', () => {
 // Based on s3-curl run
 const request = {
@@ -109,7 +109,7 @@ describe('checkAuth reconstruction of signature', () => {
 assert.strictEqual(reconstructedSig, 'MJNF7AqNapSu32TlBOVkcAxj58c=');
 });

-it('should reconstruct the signature for a GET request from ' +
+test('should reconstruct the signature for a GET request from ' +
 'CyberDuck for GCP', () => {
 // Based on CyberDuck request
 const request = {
@@ -134,7 +134,7 @@ describe('checkAuth reconstruction of signature', () => {
 assert.strictEqual(reconstructedSig, 'bdcnXSDhpN0lR2NBUlayg4vmMDU=');
 });

-it('should reconstruct the signature for a PUT request from ' +
+test('should reconstruct the signature for a PUT request from ' +
 's3cmd for GCP', () => {
 // Based on s3cmd run
 const request = {
@@ -8,7 +8,7 @@ const awsURIencode = require('../../../../lib/auth/v4/awsURIencode');
 // AWS.util.uriEscapePath and AWS.util.uriEscape functions
 // (see aws-sdk lib/signers/v4.js)
 describe('should URIencode in accordance with AWS rules', () => {
-it('should not encode / if give false argument', () => {
+test('should not encode / if give false argument', () => {
 const input1 = '/s3amazonaws.com/?$*@whateverASFEFWE()@)(*#@+ )';
 const expectedOutput1 = '/s3amazonaws.com/%3F%24%2A%40whatever' +
 'ASFEFWE%28%29%40%29%28%2A%23%40%2B%20%20%29';
@@ -24,7 +24,7 @@ describe('should URIencode in accordance with AWS rules', () => {
 assert.strictEqual(actualOutput2, expectedOutput2);
 });

-it('should encode / if no second argument given', () => {
+test('should encode / if no second argument given', () => {
 const input1 = '/s3amazonaws.com/?$*@whateverASFEFWE()@)(*#@+ )';
 const expectedOutput1 = '%2Fs3amazonaws.com%2F%3F%24%2A%40whatever' +
 'ASFEFWE%28%29%40%29%28%2A%23%40%2B%20%20%29';
@@ -40,7 +40,7 @@ describe('should URIencode in accordance with AWS rules', () => {
 assert.strictEqual(actualOutput2, expectedOutput2);
 });

-it('should encode native language characters', () => {
+test('should encode native language characters', () => {
 const input = '/s3amazonaws.com/Pâtisserie=中文-español-English' +
 '-हिन्दी-العربية-português-বাংলা-русский-日本語-ਪੰਜਾਬੀ-한국어-தமிழ்';
 const expectedOutput = '%2Fs3amazonaws.com%2FP%C3%A2tisserie%3D%E4' +
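The encoding rule these cases pin down is the one AWS documents for SigV4 canonical requests: percent-encode everything outside the unreserved set, and only leave '/' alone when path encoding is requested (the false second argument above). A hedged sketch of that rule, not the module's implementation:

```js
// Sketch of AWS-style URI encoding, assuming the same two-argument shape as
// the tests above (input, encodeSlash). encodeURIComponent already covers
// most of the rule; AWS additionally encodes !'()* and treats '/' specially.
function awsUriEncodeSketch(input, encodeSlash = true) {
    let encoded = encodeURIComponent(input)
        .replace(/[!'()*]/g,
            c => `%${c.charCodeAt(0).toString(16).toUpperCase()}`);
    if (!encodeSlash) {
        // keep path separators readable when encoding a path
        encoded = encoded.replace(/%2F/g, '/');
    }
    return encoded;
}
```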
@@ -14,7 +14,7 @@ const log = new DummyRequestLogger();
 describe(item.desc, () => {
 // Example taken from: http://docs.aws.amazon.com/AmazonS3/
 // latest/API/sig-v4-header-based-auth.html
-it('should construct a stringToSign in accordance ' +
+test('should construct a stringToSign in accordance ' +
 'with AWS rules for a get object request (header auth)', () => {
 const path = '/test.txt';
 const params = {
@@ -55,7 +55,7 @@ const log = new DummyRequestLogger();

 // Example taken from: http://docs.aws.amazon.com/AmazonS3/
 // latest/API/sig-v4-header-based-auth.html
-it('should construct a stringToSign in accordance ' +
+test('should construct a stringToSign in accordance ' +
 'with AWS rules for a put object request (header auth)', () => {
 const path = '/test$file.text';
 const params = {
@@ -98,9 +98,8 @@ const log = new DummyRequestLogger();

 // Example taken from: http://docs.aws.amazon.com/AmazonS3/
 // latest/API/sig-v4-header-based-auth.html
-it('should construct a stringToSign in accordance ' +
-'with AWS rules for a pre-signed get url request (query auth)',
-() => {
+test('should construct a stringToSign in accordance ' +
+'with AWS rules for a pre-signed get url request (query auth)', () => {
 const path = '/test.txt';
 const params = {
 request: {
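All three cases compare against the worked examples in the AWS documentation linked in the comments. The string-to-sign they validate has the documented four-line layout, sketched here for reference (not the module's own code):

```js
const crypto = require('crypto');

// Documented SigV4 string-to-sign layout: algorithm, request timestamp,
// credential scope, then the hex SHA-256 of the canonical request.
function buildStringToSign(timestamp, scope, canonicalRequest) {
    const hashedCanonicalRequest = crypto.createHash('sha256')
        .update(canonicalRequest, 'utf8')
        .digest('hex');
    return [
        'AWS4-HMAC-SHA256',
        timestamp,            // e.g. '20130524T000000Z'
        scope,                // e.g. '20130524/us-east-1/s3/aws4_request'
        hashedCanonicalRequest,
    ].join('\n');
}
```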
@@ -9,7 +9,7 @@ const createCanonicalRequest =
 describe('createCanonicalRequest function', () => {
 // Example taken from: http://docs.aws.amazon.com/AmazonS3/
 // latest/API/sig-v4-header-based-auth.html
-it('should construct a canonical request in accordance ' +
+test('should construct a canonical request in accordance ' +
 'with AWS rules for a get object request (header auth)', () => {
 const params = {
 pHttpVerb: 'GET',
@@ -48,7 +48,7 @@ describe('createCanonicalRequest function', () => {

 const msg = 'S3C-820: aws java sdk should not encode * ' +
 'character for signature';
-it(msg, () => {
+test(msg, () => {
 const doc = JSON.stringify({
 Statement: [{
 Action: 's3:*',
@@ -92,7 +92,7 @@ describe('createCanonicalRequest function', () => {

 // Example taken from: http://docs.aws.amazon.com/AmazonS3/
 // latest/API/sig-v4-header-based-auth.html
-it('should construct a canonical request in accordance ' +
+test('should construct a canonical request in accordance ' +
 'with AWS rules for a put object request (header auth)', () => {
 const params = {
 pHttpVerb: 'PUT',
@@ -134,7 +134,7 @@ describe('createCanonicalRequest function', () => {

 // Example taken from: http://docs.aws.amazon.com/AmazonS3/latest/API/
 // sigv4-query-string-auth.html
-it('should construct a canonical request in accordance ' +
+test('should construct a canonical request in accordance ' +
 'with AWS rules for a pre-signed get url request (query auth)', () => {
 const params = {
 pHttpVerb: 'GET',
@@ -167,7 +167,7 @@ describe('createCanonicalRequest function', () => {
 });


-it('should construct a canonical request that contains upper and ' +
+test('should construct a canonical request that contains upper and ' +
 'lower case query params and query params treated like headers ' +
 '(x-amz-acl)', () => {
 const params = {
@@ -208,7 +208,7 @@ describe('createCanonicalRequest function', () => {
 assert.strictEqual(actualOutput, expectedOutput);
 });

-it('should construct a canonical request that contains a ' +
+test('should construct a canonical request that contains a ' +
 'signed header with an empty string value', () => {
 const params = {
 pHttpVerb: 'PUT',
@@ -231,7 +231,7 @@ describe('createCanonicalRequest function', () => {
 assert.strictEqual(actualOutput, expectedOutput);
 });

-it('should construct a canonical request that contains a ' +
+test('should construct a canonical request that contains a ' +
 'signed expect header even if expect header value was ' +
 'stripped by the load balancer', () => {
 const params = {
@@ -254,7 +254,7 @@ describe('createCanonicalRequest function', () => {
 assert.strictEqual(actualOutput, expectedOutput);
 });

-it('should trim white space in a canonical header value so that ' +
+test('should trim white space in a canonical header value so that ' +
 'there is no white space before or after a value and any sequential ' +
 'white space becomes a single space', () => {
 const params = {
@@ -40,11 +40,11 @@ describe('v4 headerAuthCheck', () => {
 { token: undefined, error: false },
 { token: 'invalid-token', error: true },
 { token: 'a'.repeat(128), error: false },
-].forEach(test => it(`test with token(${test.token})`, () => {
+].forEach(t => test(`test with token(${t.token})`, () => {
 const alteredRequest = createAlteredRequest({
-'x-amz-security-token': test.token }, 'headers', request, headers);
+'x-amz-security-token': t.token }, 'headers', request, headers);
 const res = headerAuthCheck(alteredRequest, log);
-if (test.error) {
+if (t.error) {
 assert.notStrictEqual(res.err, undefined);
 assert.strictEqual(res.err.InvalidToken, true);
 } else {
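The parameter rename in this hunk is not cosmetic: `test` is a Jest global, so a forEach callback parameter named `test` would shadow the very function the table-driven cases now call. A minimal sketch of the resulting pattern (fixture values taken from the hunk above):

```js
// With Jest, `test` is a global; a callback parameter named `test` would
// shadow it, so the table parameter becomes `t`.
[
    { token: undefined, error: false },
    { token: 'invalid-token', error: true },
].forEach(t => test(`test with token(${t.token})`, () => {
    // ...same assertions as above, reading t.token and t.error
}));
```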
@@ -52,7 +52,7 @@ describe('v4 headerAuthCheck', () => {
 }
 }));

-it('should return error if undefined authorization header', done => {
+test('should return error if undefined authorization header', done => {
 const alteredRequest = createAlteredRequest({
 authorization: undefined }, 'headers', request, headers);
 const res = headerAuthCheck(alteredRequest, log);
@@ -60,7 +60,7 @@ describe('v4 headerAuthCheck', () => {
 done();
 });

-it('should return error if undefined sha256 header', done => {
+test('should return error if undefined sha256 header', done => {
 const alteredRequest = createAlteredRequest({
 'x-amz-content-sha256': undefined }, 'headers', request, headers);
 const res = headerAuthCheck(alteredRequest, log);
@@ -68,7 +68,7 @@ describe('v4 headerAuthCheck', () => {
 done();
 });

-it('should return error if host is not included as signed header', done => {
+test('should return error if host is not included as signed header', done => {
 const alteredRequest = createAlteredRequest({
 authorization: 'AWS4-HMAC-SHA256 Credential=accessKey1/20160208' +
 '/us-east-1/s3/aws4_request, ' +
@@ -81,7 +81,7 @@ describe('v4 headerAuthCheck', () => {
 done();
 });

-it('should return error if an x-amz header is not included as signed ' +
+test('should return error if an x-amz header is not included as signed ' +
 'header but is in request', done => {
 const alteredRequest = createAlteredRequest({
 'x-amz-acl': 'public' }, 'headers', request, headers);
@@ -90,7 +90,7 @@ describe('v4 headerAuthCheck', () => {
 done();
 });

-it('should return error if an x-scal header is not included as signed ' +
+test('should return error if an x-scal header is not included as signed ' +
 'header but is in request', done => {
 const alteredRequest = createAlteredRequest({
 'x-scal-encryption': 'true' }, 'headers', request, headers);
@@ -99,7 +99,7 @@ describe('v4 headerAuthCheck', () => {
 done();
 });

-it('should return error if missing credentials', done => {
+test('should return error if missing credentials', done => {
 const alteredRequest = createAlteredRequest({
 authorization: 'AWS4-HMAC-SHA256 SignedHeaders=host;' +
 'x-amz-content-sha256;x-amz-date, Signature=abed9' +
@@ -110,7 +110,7 @@ describe('v4 headerAuthCheck', () => {
 done();
 });

-it('should return error if missing SignedHeaders', done => {
+test('should return error if missing SignedHeaders', done => {
 // 'Sigheaders' instead of SignedHeaders in authorization
 const alteredRequest = createAlteredRequest({
 authorization: 'AWS4-HMAC-SHA256 Credential=accessKey1' +
@@ -124,7 +124,7 @@ describe('v4 headerAuthCheck', () => {
 done();
 });

-it('should return error if missing Signature', done => {
+test('should return error if missing Signature', done => {
 // Sig instead of 'Signature' in authorization
 const alteredRequest = createAlteredRequest({
 authorization: 'AWS4-HMAC-SHA256 Credential=accessKey1' +
@@ -138,7 +138,7 @@ describe('v4 headerAuthCheck', () => {
 done();
 });

-it('should return error if missing timestamp', done => {
+test('should return error if missing timestamp', done => {
 const alteredRequest = createAlteredRequest({
 'x-amz-date': undefined }, 'headers', request, headers);
 const res = headerAuthCheck(alteredRequest, log);
@@ -148,7 +148,7 @@ describe('v4 headerAuthCheck', () => {
 done();
 });

-it('should return error if scope date does not ' +
+test('should return error if scope date does not ' +
 'match timestamp date', done => {
 // Different timestamp (2015 instead of 2016)
 const alteredRequest = createAlteredRequest({
@@ -158,7 +158,7 @@ describe('v4 headerAuthCheck', () => {
 done();
 });

-it('should return error if timestamp from x-amz-date header' +
+test('should return error if timestamp from x-amz-date header' +
 'is before epochTime', done => {
 // Different date (2095 instead of 2016)
 const alteredRequest = createAlteredRequest({
@@ -176,7 +176,7 @@ describe('v4 headerAuthCheck', () => {
 done();
 });

-it('should return error if timestamp from x-amz-date header' +
+test('should return error if timestamp from x-amz-date header' +
 'is in the future', done => {
 // Different date (2095 instead of 2016)
 const alteredRequest = createAlteredRequest({
@@ -192,7 +192,7 @@ describe('v4 headerAuthCheck', () => {
 done();
 });

-it('should return error if timestamp from date header' +
+test('should return error if timestamp from date header' +
 ' is in the future (and there is no x-amz-date header)', done => {
 const alteredRequest = createAlteredRequest({
 date: 'Tue, 08 Feb 2095 20:14:05 GMT',
@@ -209,7 +209,7 @@ describe('v4 headerAuthCheck', () => {
 done();
 });

-it('should return error if timestamp from x-amz-date header' +
+test('should return error if timestamp from x-amz-date header' +
 'is too old', done => {
 // Different scope date and x-amz-date (2015 instead of 2016)
 const alteredRequest = createAlteredRequest({
@@ -226,7 +226,7 @@ describe('v4 headerAuthCheck', () => {
 done();
 });

-it('should return error if timestamp from date header' +
+test('should return error if timestamp from date header' +
 'is too old (and there is no x-amz-date header)', done => {
 // Different scope date (2015 instead of 2016) and date in 2015
 const alteredRequest = createAlteredRequest({
@@ -244,7 +244,7 @@ describe('v4 headerAuthCheck', () => {
 done();
 });

-it('should not return error due to unknown region', done => {
+test('should not return error due to unknown region', done => {
 // Returning an error causes an issue for certain clients.
 const alteredRequest = createAlteredRequest({
 authorization: 'AWS4-HMAC-SHA256 Credential=accessKey1/20160208' +
@@ -260,7 +260,7 @@ describe('v4 headerAuthCheck', () => {
 done();
 });

-it('should successfully return v4 and no error', done => {
+test('should successfully return v4 and no error', done => {
 // Freezes time so date created within function will be Feb 8, 2016
 const clock = lolex.install(1454962445000);
 const res = headerAuthCheck(request, log);
@@ -35,7 +35,7 @@ const request = {
 };

 describe('v4 queryAuthCheck', () => {
-it('should return error if algorithm param incorrect', done => {
+test('should return error if algorithm param incorrect', done => {
 const alteredRequest = createAlteredRequest({ 'X-Amz-Algorithm':
 'AWS4-HMAC-SHA1' }, 'query', request, query);
 const res = queryAuthCheck(alteredRequest, log, alteredRequest.query);
@@ -43,7 +43,7 @@ describe('v4 queryAuthCheck', () => {
 done();
 });

-it('should return error if X-Amz-Credential param is undefined', done => {
+test('should return error if X-Amz-Credential param is undefined', done => {
 const alteredRequest = createAlteredRequest({ 'X-Amz-Credential':
 undefined }, 'query', request, query);
 const res = queryAuthCheck(alteredRequest, log, alteredRequest.query);
@@ -51,7 +51,7 @@ describe('v4 queryAuthCheck', () => {
 done();
 });

-it('should return error if credential param format incorrect', done => {
+test('should return error if credential param format incorrect', done => {
 const alteredRequest = createAlteredRequest({ 'X-Amz-Credential':
 'incorrectformat' }, 'query', request, query);
 const res = queryAuthCheck(alteredRequest, log, alteredRequest.query);
@@ -59,7 +59,7 @@ describe('v4 queryAuthCheck', () => {
 done();
 });

-it('should return error if service set forth in ' +
+test('should return error if service set forth in ' +
 'credential param is not s3', done => {
 const alteredRequest = createAlteredRequest({ 'X-Amz-Credential':
 'accessKey1/20160208/us-east-1/EC2/aws4_request' },
@@ -69,7 +69,7 @@ describe('v4 queryAuthCheck', () => {
 done();
 });

-it('should return error if requestType set forth in ' +
+test('should return error if requestType set forth in ' +
 'credential param is not aws4_request', done => {
 const alteredRequest = createAlteredRequest({ 'X-Amz-Credential':
 'accessKey1/20160208/us-east-1/s3/aws2_request' },
@@ -79,7 +79,7 @@ describe('v4 queryAuthCheck', () => {
 done();
 });

-it('should return error if undefined X-Amz-SignedHeaders param', done => {
+test('should return error if undefined X-Amz-SignedHeaders param', done => {
 const alteredRequest = createAlteredRequest({ 'X-Amz-SignedHeaders':
 undefined }, 'query', request, query);
 const res = queryAuthCheck(alteredRequest, log, alteredRequest.query);
@@ -87,7 +87,7 @@ describe('v4 queryAuthCheck', () => {
 done();
 });

-it('should return error if undefined X-Amz-Signature param', done => {
+test('should return error if undefined X-Amz-Signature param', done => {
 const alteredRequest = createAlteredRequest({ 'X-Amz-Signature':
 undefined }, 'query', request, query);
 const res = queryAuthCheck(alteredRequest, log, alteredRequest.query);
@@ -95,7 +95,7 @@ describe('v4 queryAuthCheck', () => {
 done();
 });

-it('should return error if host is not included as signed header', done => {
+test('should return error if host is not included as signed header', done => {
 const alteredRequest = createAlteredRequest({ 'X-Amz-SignedHeaders':
 'none' }, 'query', request, query);
 const res = queryAuthCheck(alteredRequest, log, alteredRequest.query);
@@ -103,7 +103,7 @@ describe('v4 queryAuthCheck', () => {
 done();
 });

-it('should return error if an x-amz header is not included as signed ' +
+test('should return error if an x-amz header is not included as signed ' +
 'header but is in request', done => {
 const alteredRequest = createAlteredRequest({
 'x-amz-acl': 'public' }, 'headers', request, headers);
@@ -112,7 +112,7 @@ describe('v4 queryAuthCheck', () => {
 done();
 });

-it('should return error if an x-scal header is not included as signed ' +
+test('should return error if an x-scal header is not included as signed ' +
 'header but is in request', done => {
 const alteredRequest = createAlteredRequest({
 'x-scal-encryption': 'true' }, 'headers', request, headers);
@@ -121,7 +121,7 @@ describe('v4 queryAuthCheck', () => {
 done();
 });

-it('should return error if undefined X-Amz-Date param', done => {
+test('should return error if undefined X-Amz-Date param', done => {
 const alteredRequest = createAlteredRequest({ 'X-Amz-Date':
 undefined }, 'query', request, query);
 const res = queryAuthCheck(alteredRequest, log, alteredRequest.query);
@@ -129,7 +129,7 @@ describe('v4 queryAuthCheck', () => {
 done();
 });

-it('should return error if undefined X-Amz-Expires param', done => {
+test('should return error if undefined X-Amz-Expires param', done => {
 const alteredRequest = createAlteredRequest({ 'X-Amz-Expires':
 undefined }, 'query', request, query);
 const res = queryAuthCheck(alteredRequest, log, alteredRequest.query);
@@ -137,7 +137,7 @@ describe('v4 queryAuthCheck', () => {
 done();
 });

-it('should return error if X-Amz-Expires param ' +
+test('should return error if X-Amz-Expires param ' +
 'is less than 1', done => {
 const alteredRequest = createAlteredRequest({ 'X-Amz-Expires':
 0 }, 'query', request, query);
@@ -146,7 +146,7 @@ describe('v4 queryAuthCheck', () => {
 done();
 });

-it('should return error if X-Amz-Expires param ' +
+test('should return error if X-Amz-Expires param ' +
 'is greater than 604800', done => {
 // Greater than 604800 seconds (7 days)
 const alteredRequest = createAlteredRequest({ 'X-Amz-Expires':
@@ -156,7 +156,7 @@ describe('v4 queryAuthCheck', () => {
 done();
 });

-it('should return error if X-Amz-Date param is in the future', done => {
+test('should return error if X-Amz-Date param is in the future', done => {
 // 2095 instead of 2016
 const alteredRequest = createAlteredRequest({
 'X-Amz-Date': '20950208T234304Z',
@@ -167,7 +167,7 @@ describe('v4 queryAuthCheck', () => {
 done();
 });

-it('should return error if X-Amz-Date param is too old', done => {
+test('should return error if X-Amz-Date param is too old', done => {
 const alteredRequest = createAlteredRequest({
 'X-Amz-Date': '20160208T234304Z',
 }, 'query', request, query);
@@ -176,7 +176,7 @@ describe('v4 queryAuthCheck', () => {
 done();
 });

-it('should return error if scope date from X-Amz-Credential param' +
+test('should return error if scope date from X-Amz-Credential param' +
 'does not match date from X-Amz-Date param', done => {
 const clock = lolex.install(1454974984001);
 const alteredRequest = createAlteredRequest({
@@ -189,7 +189,7 @@ describe('v4 queryAuthCheck', () => {
 done();
 });

-it('should successfully return v4 and no error', done => {
+test('should successfully return v4 and no error', done => {
 // Freezes time so date created within function will be Feb 8, 2016
 // (within 15 minutes of timestamp in request)
 const clock = lolex.install(1454974984001);
@@ -200,7 +200,7 @@ describe('v4 queryAuthCheck', () => {
 done();
 });

-it('should successfully return v4 and no error if X-Amz-Expires param ' +
+test('should successfully return v4 and no error if X-Amz-Expires param ' +
 'is 604800 (7 days)', done => {
 // Freezes time so date created within function will be Feb 8, 2016
 const clock = lolex.install(1454974984001);
@@ -213,7 +213,7 @@ describe('v4 queryAuthCheck', () => {
 done();
 });

-it('should successfully return v4 and no error if X-Amz-Expires param ' +
+test('should successfully return v4 and no error if X-Amz-Expires param ' +
 'is less thant 604800 (7 days)', done => {
 // Freezes time so date created within function will be Feb 8, 2016
 const clock = lolex.install(1454974984001);
@@ -7,7 +7,7 @@ const calculateSigningKey =
 .calculateSigningKey;

 describe('v4 signing key calculation', () => {
-it('should calculate a signing key in accordance with AWS rules', () => {
+test('should calculate a signing key in accordance with AWS rules', () => {
 const secretKey = 'verySecretKey1';
 const region = 'us-east-1';
 const scopeDate = '20160209';
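The inputs above (secret key, region, scope date) feed the documented SigV4 signing-key derivation, an HMAC-SHA256 chain. A sketch of what calculateSigningKey is expected to reproduce, not the module itself:

```js
const crypto = require('crypto');

function hmac(key, data) {
    return crypto.createHmac('sha256', key).update(data, 'utf8').digest();
}

// Documented SigV4 key derivation: date -> region -> service -> request.
function deriveSigningKey(secretKey, scopeDate, region, service = 's3') {
    const kDate = hmac(`AWS4${secretKey}`, scopeDate); // e.g. '20160209'
    const kRegion = hmac(kDate, region);               // e.g. 'us-east-1'
    const kService = hmac(kRegion, service);
    return hmac(kService, 'aws4_request');
}
```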
@@ -14,7 +14,7 @@ const DummyRequestLogger = require('../../helpers').DummyRequestLogger;
 const log = new DummyRequestLogger();

 describe('convertAmzTimeToMs function', () => {
-it('should convert ISO8601Timestamp format without ' +
+test('should convert ISO8601Timestamp format without ' +
 'dashes or colons, e.g. 20160202T220410Z to milliseconds since ' +
 'Unix epoch', () => {
 const input = '20160202T220410Z';
@@ -25,7 +25,7 @@ describe('convertAmzTimeToMs function', () => {
 });

 describe('convertUTCtoISO8601 function', () => {
-it('should UTC timestamp to ISO8601 timestamp', () => {
+test('should UTC timestamp to ISO8601 timestamp', () => {
 const input = 'Sun, 08 Feb 2015 20:14:05 GMT';
 const expectedOutput = '20150208T201405Z';
 const actualOutput = convertUTCtoISO8601(input);
@@ -35,17 +35,17 @@ describe('convertUTCtoISO8601 function', () => {

 describe('checkTimeSkew function', () => {
 let clock;
-before(() => {
+beforeAll(() => {
 // Time is 2016-03-17T18:22:01.033Z
 clock = lolex.install(1458238921033);
 });
-after(() => {
+afterAll(() => {
 clock.uninstall();
 });

 // Our default expiry for header auth check is 15 minutes (in secs)
 const expiry = (15 * 60);
-it('should allow requests with timestamps under 15 minutes ' +
+test('should allow requests with timestamps under 15 minutes ' +
 'in the future', () => {
 const timestamp14MinInFuture = '20160317T183601033Z';
 const expectedOutput = false;
@@ -54,7 +54,7 @@ describe('checkTimeSkew function', () => {
 assert.strictEqual(actualOutput, expectedOutput);
 });

-it('should not allow requests with timestamps more than 15 minutes ' +
+test('should not allow requests with timestamps more than 15 minutes ' +
 'in the future', () => {
 const timestamp16MinInFuture = '20160317T183801033Z';
 const expectedOutput = true;
@@ -63,7 +63,7 @@ describe('checkTimeSkew function', () => {
 assert.strictEqual(actualOutput, expectedOutput);
 });

-it('should allow requests with timestamps earlier than the ' +
+test('should allow requests with timestamps earlier than the ' +
 'the expiry', () => {
 const timestamp14MinInPast = '20160317T180801033Z';
 const expectedOutput = false;
@@ -72,7 +72,7 @@ describe('checkTimeSkew function', () => {
 assert.strictEqual(actualOutput, expectedOutput);
 });

-it('should not allow requests with timestamps later ' +
+test('should not allow requests with timestamps later ' +
 'than the expiry', () => {
 const timestamp16MinInPast = '20160317T180601033Z';
 const expectedOutput = true;
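The before/after to beforeAll/afterAll change above is the only lifecycle-hook rename the Mocha-to-Jest move requires here (beforeEach/afterEach keep their names). The tests stay on lolex rather than Jest's fake timers because lolex.install(<ms>) also freezes Date, which the time-skew checks depend on. The resulting pattern, condensed from the hunk above:

```js
const lolex = require('lolex');

describe('checkTimeSkew function', () => {
    let clock;
    beforeAll(() => {
        // Freeze time at 2016-03-17T18:22:01.033Z for every test in the block
        clock = lolex.install(1458238921033);
    });
    afterAll(() => {
        clock.uninstall();
    });
    // ...assertions comparing request timestamps against the frozen clock
});
```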
@@ -40,7 +40,7 @@ function checkValueNotInDb(db, k, done) {
 }

 describe('IndexTransaction', () => {
-it('should allow put', done => {
+test('should allow put', done => {
 const db = createDb();
 const transaction = new IndexTransaction(db);

@@ -59,7 +59,7 @@ describe('IndexTransaction', () => {
 });
 });

-it('should allow del', done => {
+test('should allow del', done => {
 const db = createDb();
 const transaction = new IndexTransaction(db);

@@ -83,7 +83,7 @@ describe('IndexTransaction', () => {
 });
 });

-it('should commit put and del combined', done => {
+test('should commit put and del combined', done => {
 const db = createDb();
 const transaction = new IndexTransaction(db);

@@ -124,7 +124,7 @@ describe('IndexTransaction', () => {
 .write(commitTransactionAndCheck);
 });

-it('should refuse types other than del and put', done => {
+test('should refuse types other than del and put', done => {
 const transaction = new IndexTransaction();

 function tryPush() {
@@ -147,7 +147,7 @@ describe('IndexTransaction', () => {
 assert.throws(tryPush, validateError);
 });

-it('should refuse put without key', done => {
+test('should refuse put without key', done => {
 const transaction = new IndexTransaction();

 function tryPush() {
@@ -169,7 +169,7 @@ describe('IndexTransaction', () => {
 assert.throws(tryPush, validateError);
 });

-it('should refuse del without key', done => {
+test('should refuse del without key', done => {
 const transaction = new IndexTransaction();

 function tryPush() {
@@ -190,7 +190,7 @@ describe('IndexTransaction', () => {
 assert.throws(tryPush, validateError);
 });

-it('should refuse put without value', done => {
+test('should refuse put without value', done => {
 const transaction = new IndexTransaction();

 function tryPush() {
@@ -212,7 +212,7 @@ describe('IndexTransaction', () => {
 assert.throws(tryPush, validateError);
 });

-it('should refuse to commit without any ops', done => {
+test('should refuse to commit without any ops', done => {
 const transaction = new IndexTransaction();

 transaction.commit(err => {
@@ -224,7 +224,7 @@ describe('IndexTransaction', () => {
 });
 });

-it('should refuse to commit twice', done => {
+test('should refuse to commit twice', done => {
 const transaction = new IndexTransaction(createDb());

 transaction.push({
@@ -250,7 +250,7 @@ describe('IndexTransaction', () => {
 transaction.commit(tryCommitAgain);
 });

-it('should refuse add an op if already committed', done => {
+test('should refuse add an op if already committed', done => {
 const transaction = new IndexTransaction(createDb());

 function push() {
@@ -282,7 +282,7 @@ describe('IndexTransaction', () => {
 transaction.commit(tryPushAgain);
 });

-it('should have a working put shortcut method', done => {
+test('should have a working put shortcut method', done => {
 const db = createDb();
 const transaction = new IndexTransaction(db);

@@ -297,7 +297,7 @@ describe('IndexTransaction', () => {
 });
 });

-it('should have a working del shortcut method', done => {
+test('should have a working del shortcut method', done => {
 const db = createDb();
 const transaction = new IndexTransaction(db);

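Taken together, these cases exercise a simple batch protocol: operations are queued with push (or the put/del shortcuts) and then applied once with commit, after which the transaction refuses further use. A hedged sketch of that usage, with key and value names that are illustrative only:

```js
// Sketch of the usage pattern the IndexTransaction tests above exercise;
// op objects carry a type ('put' or 'del'), a key, and for puts a value,
// as implied by the "refuse put without key/value" cases.
const transaction = new IndexTransaction(db);
transaction.push({ type: 'put', key: 'example-key', value: 'example-value' });
transaction.push({ type: 'del', key: 'stale-key' });
transaction.commit(err => {
    // committing again, or pushing after commit, is rejected (see above)
});
```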
@@ -5,7 +5,7 @@ const errors = require('../../index').errors;

 describe('Errors: ', () => {
 Object.keys(errors).forEach(index => {
-it(`should return and instance of ${index} Error`, done => {
+test(`should return and instance of ${index} Error`, done => {
 assert.strictEqual(errors[index] instanceof Error, true,
 'should be an instance of Error');
 assert.strictEqual(errors[index].code, errorsJSON[index].code,
@@ -18,7 +18,7 @@ describe('Errors: ', () => {
 });
 });

-it('should allow custom error descriptions', () => {
+test('should allow custom error descriptions', () => {
 const originDescription = errors.NoSuchEntity.description;
 const error =
 errors.NoSuchEntity.customizeDescription('custom-description');
@@ -26,52 +26,74 @@ function cidrListMatchCheck(cidrList, ip, expectedRes) {
 }

 describe('Parse IP address', () => {
-it('should parse IPv4 address',
-() => parseValidIpCheck('192.168.1.1', ipaddr.IPv4));
+test(
+'should parse IPv4 address',
+() => parseValidIpCheck('192.168.1.1', ipaddr.IPv4)
+);

-it('should parse IPv6 address',
-() => parseValidIpCheck('2001:cdba::3257:9652', ipaddr.IPv6));
+test(
+'should parse IPv6 address',
+() => parseValidIpCheck('2001:cdba::3257:9652', ipaddr.IPv6)
+);

-it('should parse IPv4 mapped IPv6 address',
+test(
+'should parse IPv4 mapped IPv6 address',
 // ::ffff:c0a8:101 mapped for 192.168.1.1
-() => parseValidIpCheck('::ffff:c0a8:101', ipaddr.IPv4));
+() => parseValidIpCheck('::ffff:c0a8:101', ipaddr.IPv4)
+);

 ['260.384.2.1', 'INVALID', '', null, undefined].forEach(item => {
-it(`should return empty object for invalid IP address: (${item})`,
-() => parseInvalidIpCheck(item));
+test(
+`should return empty object for invalid IP address: (${item})`,
+() => parseInvalidIpCheck(item)
+);
 });
 });

 describe('Check IP matches CIDR range', () => {
-it('should match IP in a range',
-() => cidrMatchCheck('192.168.1.0/24', '192.168.1.1', true));
+test(
+'should match IP in a range',
+() => cidrMatchCheck('192.168.1.0/24', '192.168.1.1', true)
+);

-it('should not match IP not in a range',
-() => cidrMatchCheck('192.168.1.0/24', '127.0.0.1', false));
+test(
+'should not match IP not in a range',
+() => cidrMatchCheck('192.168.1.0/24', '127.0.0.1', false)
+);

-it('should match if range equals IP',
-() => cidrMatchCheck('192.168.1.1', '192.168.1.1', true));
+test(
+'should match if range equals IP',
+() => cidrMatchCheck('192.168.1.1', '192.168.1.1', true)
+);


 ['260.384.2.1', 'INVALID', '', null, undefined].forEach(item => {
-it(`should not match for invalid IP: (${item})`,
-() => cidrMatchCheck('192.168.1.0/24', item, false));
+test(
+`should not match for invalid IP: (${item})`,
+() => cidrMatchCheck('192.168.1.0/24', item, false)
+);
 });
 });

 describe('Check IP matches a list of CIDR ranges', () => {
-it('should match IP in a valid range',
+test(
+'should match IP in a valid range',
 () => cidrListMatchCheck(['192.168.1.0/24', '192.168.100.14/24',
-'2001:db8::'], '192.168.100.1', true));
+'2001:db8::'], '192.168.100.1', true)
+);

 [
 [['127.0.0.1'], '127.0.0.2'],
 [['192.168.1.1'], '192.168.1.1'],
 ].forEach(item =>
-it(`should match IP ${item[0][0]} without CIDR range`,
-() => cidrListMatchCheck(item[0], item[1], true))
+test(
+`should match IP ${item[0][0]} without CIDR range`,
+() => cidrListMatchCheck(item[0], item[1], true)
+)
 );

-it('should not range match if CIDR range is not provided',
-() => cidrListMatchCheck(['192.168.1.1'], '192.168.1.3', false));
+test(
+'should not range match if CIDR range is not provided',
+() => cidrListMatchCheck(['192.168.1.1'], '192.168.1.3', false)
+);
 });
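The single-expression it() calls in this file simply become multi-line test() calls with the same arguments. A possible further cleanup, not part of this change, would be Jest's test.each (available since Jest 23, so covered by the jest@^24.8.0 this branch adds) for the table-driven invalid-IP cases; a sketch reusing the helper already defined in this file:

```js
// Optional alternative (not in this diff): express the invalid-IP table with
// test.each instead of forEach. cidrMatchCheck is the existing helper above.
test.each([
    ['260.384.2.1'],
    ['INVALID'],
    [''],
    [null],
    [undefined],
])('should not match for invalid IP: (%s)', item => {
    cidrMatchCheck('192.168.1.0/24', item, false);
});
```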
@@ -5,9 +5,8 @@ const jsutil = require('../../index').jsutil;

 describe('JSUtil', () => {
 describe('once', () => {
-it('should call the wrapped function only once when invoked ' +
-'multiple times',
-done => {
+test('should call the wrapped function only once when invoked ' +
+'multiple times', done => {
 let value = 42;
 let value2 = 51;

@@ -30,7 +30,7 @@ describe('Matrix', () => {
 }).execute();

 describe('Check if matrix was well generated', () => {
-it('Has generated 40 matrix', done => {
+test('Has generated 40 matrix', done => {
 const callback = () => {
 if (numberOfCall === 40) {
 done();
@@ -66,7 +66,7 @@ describe('Matrix', () => {
 }).execute();

 describe('Check if matrix was well generated', () => {
-it('Should was call 20 times per auth value', done => {
+test('Should was call 20 times per auth value', done => {
 const callback = () => {
 if (numberOfCallV2 === 20 && numberOfCallV4 === 20) {
 done();
@@ -107,7 +107,7 @@ describe('Matrix', () => {
 }).execute();

 describe('Check if matrix was well generated', () => {
-it('All exception was called', done => {
+test('All exception was called', done => {
 const callback = () => {
 if (callAbcd === true && callUndefined === true) {
 done();
@@ -150,7 +150,7 @@ describe('Matrix', () => {
 }).execute();

 describe('Check if matrix was well generated', () => {
-it('All exception was called', done => {
+test('All exception was called', done => {
 const callback = () => {
 if (callAbcd === true && callUndefined === true) {
 done();
@@ -182,7 +182,7 @@ describe('Matrix', () => {
 }, 'should generate matrix').execute();

 describe('Check if matrix was well generated', () => {
-it('Has been called', done => {
+test('Has been called', done => {
 const callback = () => {
 if (hasBeenCalled === true) {
 done();
@@ -207,7 +207,7 @@ describe('Matrix', () => {
 } catch (e) {
 anExceptionWasFound = true;
 }
-it('An exception was launched', done => {
+test('An exception was launched', done => {
 assert.equal(anExceptionWasFound, true);
 done();
 });
@@ -226,7 +226,7 @@ describe('Matrix', () => {
 } catch (e) {
 anExceptionWasFound = true;
 }
-it('An exception was launched', done => {
+test('An exception was launched', done => {
 assert.equal(anExceptionWasFound, true);
 done();
 });
@@ -28,7 +28,8 @@ describe('StatsClient class', () => {

 afterEach(() => redisClient.clear(() => {}));

-it('should correctly record a new request by default one increment',
+test(
+'should correctly record a new request by default one increment',
 done => {
 async.series([
 next => {
@@ -50,9 +51,10 @@ describe('StatsClient class', () => {
 });
 },
 ], done);
-});
+}
+);

-it('should record new requests by defined amount increments', done => {
+test('should record new requests by defined amount increments', done => {
 function noop() {}

 async.series([
@@ -86,7 +88,7 @@ describe('StatsClient class', () => {
 ], done);
 });

-it('should correctly record a 500 on the server', done => {
+test('should correctly record a 500 on the server', done => {
 statsClient.report500(id, (err, res) => {
 assert.ifError(err);

@@ -96,7 +98,7 @@ describe('StatsClient class', () => {
 });
 });

-it('should respond back with total requests', done => {
+test('should respond back with total requests', done => {
 async.series([
 next => {
 statsClient.reportNewRequest(id, err => {
@@ -108,7 +108,7 @@ describe('ARN object model', () => {
 isIAMUser: false,
 isIAMRole: false,
 },
-].forEach(arnTest => it(`should accept ARN "${arnTest.arn}"`, () => {
+].forEach(arnTest => test(`should accept ARN "${arnTest.arn}"`, () => {
 const arnObj = ARN.createFromString(arnTest.arn);
 assert(arnObj instanceof ARN);
 assert.strictEqual(arnObj.getService(), arnTest.service);
@@ -128,7 +128,7 @@ describe('ARN object model', () => {
 'arn:aws:xxx::123456789012:role/backbeat',
 'arn:aws:s3::123456789012345:role/backbeat',
 'arn:aws:s3::12345678901b:role/backbeat',
-].forEach(arn => it(`should fail with invalid ARN "${arn}"`, () => {
+].forEach(arn => test(`should fail with invalid ARN "${arn}"`, () => {
 const res = ARN.createFromString(arn);
 assert.notStrictEqual(res.error, undefined);
 }));
@@ -136,7 +136,7 @@ Object.keys(acl).forEach(
 
     describe('serialize/deSerialize on BucketInfo class', () => {
         const serialized = dummyBucket.serialize();
-        it('should serialize', done => {
+        test('should serialize', done => {
             assert.strictEqual(typeof serialized, 'string');
             const bucketInfos = {
                 acl: dummyBucket._acl,
@@ -163,7 +163,7 @@ Object.keys(acl).forEach(
             done();
         });
 
-        it('should deSerialize into an instance of BucketInfo', done => {
+        test('should deSerialize into an instance of BucketInfo', done => {
            const serialized = dummyBucket.serialize();
            const deSerialized = BucketInfo.deSerialize(serialized);
            assert.strictEqual(typeof deSerialized, 'object');
@@ -174,8 +174,7 @@ Object.keys(acl).forEach(
     });
 
     describe('constructor', () => {
-        it('this should have the right BucketInfo types',
-        () => {
+        test('this should have the right BucketInfo types', () => {
             assert.strictEqual(typeof dummyBucket.getName(), 'string');
             assert.strictEqual(typeof dummyBucket.getOwner(), 'string');
             assert.strictEqual(typeof dummyBucket.getOwnerDisplayName(),
@@ -183,7 +182,7 @@ Object.keys(acl).forEach(
             assert.strictEqual(typeof dummyBucket.getCreationDate(),
                 'string');
         });
-        it('this should have the right acl\'s types', () => {
+        test('this should have the right acl\'s types', () => {
             assert.strictEqual(typeof dummyBucket.getAcl(), 'object');
             assert.strictEqual(
                 typeof dummyBucket.getAcl().Canned, 'string');
@@ -193,11 +192,11 @@ Object.keys(acl).forEach(
             assert(Array.isArray(dummyBucket.getAcl().READ));
             assert(Array.isArray(dummyBucket.getAcl().READ_ACP));
         });
-        it('this should have the right acls', () => {
+        test('this should have the right acls', () => {
             assert.deepStrictEqual(dummyBucket.getAcl(),
                 acl[aclObj] || emptyAcl);
         });
-        it('this should have the right website config types', () => {
+        test('this should have the right website config types', () => {
             const websiteConfig = dummyBucket.getWebsiteConfiguration();
             assert.strictEqual(typeof websiteConfig, 'object');
             assert.strictEqual(typeof websiteConfig._indexDocument,
@@ -206,7 +205,7 @@ Object.keys(acl).forEach(
                 'string');
             assert(Array.isArray(websiteConfig._routingRules));
         });
-        it('this should have the right cors config types', () => {
+        test('this should have the right cors config types', () => {
             const cors = dummyBucket.getCors();
             assert(Array.isArray(cors));
             assert(Array.isArray(cors[0].allowedMethods));
@@ -220,54 +219,53 @@ Object.keys(acl).forEach(
     });
 
     describe('getters on BucketInfo class', () => {
-        it('getACl should return the acl', () => {
+        test('getACl should return the acl', () => {
             assert.deepStrictEqual(dummyBucket.getAcl(),
                 acl[aclObj] || emptyAcl);
         });
-        it('getName should return name', () => {
+        test('getName should return name', () => {
             assert.deepStrictEqual(dummyBucket.getName(), bucketName);
         });
-        it('getOwner should return owner', () => {
+        test('getOwner should return owner', () => {
             assert.deepStrictEqual(dummyBucket.getOwner(), owner);
         });
-        it('getOwnerDisplayName should return ownerDisplayName', () => {
+        test('getOwnerDisplayName should return ownerDisplayName', () => {
             assert.deepStrictEqual(dummyBucket.getOwnerDisplayName(),
                 ownerDisplayName);
         });
-        it('getCreationDate should return creationDate', () => {
+        test('getCreationDate should return creationDate', () => {
             assert.deepStrictEqual(dummyBucket.getCreationDate(), testDate);
         });
-        it('getVersioningConfiguration should return configuration', () => {
+        test('getVersioningConfiguration should return configuration', () => {
             assert.deepStrictEqual(dummyBucket.getVersioningConfiguration(),
                 testVersioningConfiguration);
         });
-        it('getWebsiteConfiguration should return configuration', () => {
+        test('getWebsiteConfiguration should return configuration', () => {
             assert.deepStrictEqual(dummyBucket.getWebsiteConfiguration(),
                 testWebsiteConfiguration);
         });
-        it('getLocationConstraint should return locationConstraint', () => {
+        test('getLocationConstraint should return locationConstraint', () => {
             assert.deepStrictEqual(dummyBucket.getLocationConstraint(),
                 testLocationConstraint);
         });
-        it('getCors should return CORS configuration', () => {
+        test('getCors should return CORS configuration', () => {
             assert.deepStrictEqual(dummyBucket.getCors(),
                 testCorsConfiguration);
         });
-        it('getLifeCycleConfiguration should return configuration', () => {
+        test('getLifeCycleConfiguration should return configuration', () => {
             assert.deepStrictEqual(dummyBucket.getLifecycleConfiguration(),
                 testLifecycleConfiguration);
         });
     });
 
     describe('setters on BucketInfo class', () => {
-        it('setCannedAcl should set acl.Canned', () => {
+        test('setCannedAcl should set acl.Canned', () => {
             const testAclCanned = 'public-read';
             dummyBucket.setCannedAcl(testAclCanned);
             assert.deepStrictEqual(
                 dummyBucket.getAcl().Canned, testAclCanned);
         });
-        it('setSpecificAcl should set the acl of a specified bucket',
-        () => {
+        test('setSpecificAcl should set the acl of a specified bucket', () => {
             const typeOfGrant = 'WRITE';
             dummyBucket.setSpecificAcl(owner, typeOfGrant);
             const lastIndex =
@@ -275,7 +273,7 @@ Object.keys(acl).forEach(
             assert.deepStrictEqual(
                 dummyBucket.getAcl()[typeOfGrant][lastIndex], owner);
         });
-        it('setFullAcl should set full set of ACLs', () => {
+        test('setFullAcl should set full set of ACLs', () => {
             const newACLs = {
                 Canned: '',
                 FULL_CONTROL: ['someOtherAccount'],
@@ -290,30 +288,29 @@ Object.keys(acl).forEach(
             assert.deepStrictEqual(dummyBucket.getAcl().WRITE_ACP,
                 ['yetAnotherAccount']);
         });
-        it('setName should set the bucket name', () => {
+        test('setName should set the bucket name', () => {
             const newName = 'newName';
             dummyBucket.setName(newName);
             assert.deepStrictEqual(dummyBucket.getName(), newName);
         });
-        it('setOwner should set the owner', () => {
+        test('setOwner should set the owner', () => {
             const newOwner = 'newOwner';
             dummyBucket.setOwner(newOwner);
             assert.deepStrictEqual(dummyBucket.getOwner(), newOwner);
         });
-        it('getOwnerDisplayName should return ownerDisplayName', () => {
+        test('getOwnerDisplayName should return ownerDisplayName', () => {
             const newOwnerDisplayName = 'newOwnerDisplayName';
             dummyBucket.setOwnerDisplayName(newOwnerDisplayName);
             assert.deepStrictEqual(dummyBucket.getOwnerDisplayName(),
                 newOwnerDisplayName);
         });
-        it('setLocationConstraint should set the locationConstraint',
-        () => {
+        test('setLocationConstraint should set the locationConstraint', () => {
             const newLocation = 'newLocation';
             dummyBucket.setLocationConstraint(newLocation);
             assert.deepStrictEqual(
                 dummyBucket.getLocationConstraint(), newLocation);
         });
-        it('setVersioningConfiguration should set configuration', () => {
+        test('setVersioningConfiguration should set configuration', () => {
             const newVersioningConfiguration =
                 { Status: 'Enabled', MfaDelete: 'Enabled' };
             dummyBucket
@@ -321,7 +318,7 @@ Object.keys(acl).forEach(
             assert.deepStrictEqual(dummyBucket.getVersioningConfiguration(),
                 newVersioningConfiguration);
         });
-        it('setWebsiteConfiguration should set configuration', () => {
+        test('setWebsiteConfiguration should set configuration', () => {
             const newWebsiteConfiguration = {
                 redirectAllRequestsTo: {
                     hostName: 'www.example.com',
@@ -333,14 +330,14 @@ Object.keys(acl).forEach(
             assert.deepStrictEqual(dummyBucket.getWebsiteConfiguration(),
                 newWebsiteConfiguration);
         });
-        it('setCors should set CORS configuration', () => {
+        test('setCors should set CORS configuration', () => {
             const newCorsConfiguration =
                 [{ allowedMethods: ['PUT'], allowedOrigins: ['*'] }];
             dummyBucket.setCors(newCorsConfiguration);
             assert.deepStrictEqual(dummyBucket.getCors(),
                 newCorsConfiguration);
         });
-        it('setReplicationConfiguration should set replication ' +
+        test('setReplicationConfiguration should set replication ' +
             'configuration', () => {
             const newReplicationConfig = {
                 Role: 'arn:aws:iam::123456789012:role/src-resource,' +
@@ -357,7 +354,7 @@ Object.keys(acl).forEach(
             };
             dummyBucket.setReplicationConfiguration(newReplicationConfig);
         });
-        it('setLifecycleConfiguration should set lifecycle ' +
+        test('setLifecycleConfiguration should set lifecycle ' +
             'configuration', () => {
             const newLifecycleConfig = {
                 rules: [

@@ -264,14 +264,13 @@ describe('LifecycleConfiguration class getLifecycleConfiguration', () => {
         tagObj = {};
     });
 
-    it('should return MalformedXML error if request xml is empty', done => {
+    test('should return MalformedXML error if request xml is empty', done => {
         const errMessage = 'request xml is undefined or empty';
         checkError('', 'MalformedXML', errMessage, done);
     });
 
     requiredTags.forEach(t => {
-        it(`should return ${t.error} error if ${t.tag} tag is missing`,
-        done => {
+        test(`should return ${t.error} error if ${t.tag} tag is missing`, done => {
             generateParsedXml(t.tag, null, parsedXml => {
                 checkError(parsedXml, t.error, t.errMessage, done);
             });
@@ -280,8 +279,7 @@ describe('LifecycleConfiguration class getLifecycleConfiguration', () => {
 
     notImplementedActions.forEach(action => {
         const expError = 'NotImplemented';
-        it(`should return ${expError} error for ${action.tag} action`,
-        done => {
+        test(`should return ${expError} error for ${action.tag} action`, done => {
             generateParsedXml('Action', action, parsedXml => {
                 checkError(parsedXml, expError, action.errMessage, done);
             });
@@ -289,8 +287,7 @@ describe('LifecycleConfiguration class getLifecycleConfiguration', () => {
     });
 
     invalidActions.forEach(a => {
-        it(`should return ${a.error} for ${a.label} action error`,
-        done => {
+        test(`should return ${a.error} for ${a.label} action error`, done => {
             generateParsedXml('Action', a, parsedXml => {
                 checkError(parsedXml, a.error, a.errMessage, done);
             });
@@ -298,15 +295,17 @@ describe('LifecycleConfiguration class getLifecycleConfiguration', () => {
     });
 
     invalidFilters.forEach(filter => {
-        it(`should return ${filter.error} for ${filter.label} filter error`,
+        test(
+            `should return ${filter.error} for ${filter.label} filter error`,
             done => {
                 generateParsedXml('Filter', filter, parsedXml => {
                     checkError(parsedXml, filter.error, filter.errMessage, done);
                 });
-            });
+            }
+        );
     });
 
-    it('should return MalformedXML error if invalid status', done => {
+    test('should return MalformedXML error if invalid status', done => {
         tagObj.status = 'foo';
         const errMessage = 'Status is not valid';
         generateParsedXml('Status', tagObj, parsedXml => {
@@ -314,7 +313,7 @@ describe('LifecycleConfiguration class getLifecycleConfiguration', () => {
         });
     });
 
-    it('should return InvalidRequest error if ID not unique', done => {
+    test('should return InvalidRequest error if ID not unique', done => {
         tagObj.rule = 'not-unique-id';
         const errMessage = 'Rule ID must be unique';
         generateParsedXml('Rule', tagObj, parsedXml => {
@@ -322,7 +321,7 @@ describe('LifecycleConfiguration class getLifecycleConfiguration', () => {
         });
     });
 
-    it('should return InvalidArgument error if invalid ID', done => {
+    test('should return InvalidArgument error if invalid ID', done => {
         tagObj.id = 'a'.repeat(256);
         const errMessage = 'Rule ID is greater than 255 characters long';
         generateParsedXml('ID', tagObj, parsedXml => {
@@ -330,7 +329,7 @@ describe('LifecycleConfiguration class getLifecycleConfiguration', () => {
         });
     });
 
-    it('should return MalformedXML error if over 1000 rules', done => {
+    test('should return MalformedXML error if over 1000 rules', done => {
         tagObj.rule = 'too-many-rules';
         const errMessage = 'request xml includes over max limit of 1000 rules';
         generateParsedXml('Rule', tagObj, parsedXml => {
@@ -338,7 +337,7 @@ describe('LifecycleConfiguration class getLifecycleConfiguration', () => {
         });
     });
 
-    it('should use last listed Prefix if multiple Prefixes included', done => {
+    test('should use last listed Prefix if multiple Prefixes included', done => {
         tagObj.label = 'mult-prefixes';
         tagObj.lastPrefix = 'coco';
         generateParsedXml('Filter', tagObj, parsedXml => {

@@ -19,14 +19,14 @@ const testRoutingRuleParams = {
 };
 
 describe('RoutingRule class', () => {
-    it('should initialize even if no parameters are provided', done => {
+    test('should initialize even if no parameters are provided', done => {
         const routingRule = new RoutingRule();
         assert.strictEqual(routingRule._redirect, undefined);
         assert.strictEqual(routingRule._condition, undefined);
         done();
     });
 
-    it('should return a new routing rule', done => {
+    test('should return a new routing rule', done => {
         const routingRule = new RoutingRule(testRoutingRuleParams);
         assert.deepStrictEqual(routingRule._redirect,
             testRoutingRuleParams.redirect);
@@ -35,14 +35,14 @@ describe('RoutingRule class', () => {
         done();
     });
 
-    it('getRedirect should fetch the instance\'s redirect', done => {
+    test('getRedirect should fetch the instance\'s redirect', done => {
         const routingRule = new RoutingRule(testRoutingRuleParams);
         assert.deepStrictEqual(routingRule.getRedirect(),
             testRoutingRuleParams.redirect);
         done();
     });
 
-    it('getCondition should fetch the instance\'s condition', done => {
+    test('getCondition should fetch the instance\'s condition', done => {
         const routingRule = new RoutingRule(testRoutingRuleParams);
         assert.deepStrictEqual(routingRule.getCondition(),
             testRoutingRuleParams.condition);
@@ -51,7 +51,7 @@ describe('RoutingRule class', () => {
 });
 
 describe('WebsiteConfiguration class', () => {
-    it('should initialize even if no parameters are provided', done => {
+    test('should initialize even if no parameters are provided', done => {
         const websiteConfig = new WebsiteConfiguration();
         assert.strictEqual(websiteConfig._indexDocument, undefined);
         assert.strictEqual(websiteConfig._errorDocument, undefined);
@@ -60,7 +60,7 @@ describe('WebsiteConfiguration class', () => {
         done();
     });
 
-    it('should initialize indexDocument, errorDocument during construction ' +
+    test('should initialize indexDocument, errorDocument during construction ' +
         'if provided in params', done => {
         const testWebsiteConfigParams = {
             indexDocument: 'index.html',
@@ -72,7 +72,7 @@ describe('WebsiteConfiguration class', () => {
         done();
     });
 
-    it('should initialize redirectAllRequestsTo during construction if ' +
+    test('should initialize redirectAllRequestsTo during construction if ' +
         'provided in params', done => {
         const testWebsiteConfigParams = {
             redirectAllRequestsTo: {
@@ -88,7 +88,7 @@ describe('WebsiteConfiguration class', () => {
         done();
     });
 
-    it('should initialize routingRules properly during construction from ' +
+    test('should initialize routingRules properly during construction from ' +
         'array of RoutingRule class instances', done => {
         const testWebsiteConfigParams = {
             routingRules: [],
@@ -103,7 +103,7 @@ describe('WebsiteConfiguration class', () => {
         done();
     });
 
-    it('should initialize routingRules properly during construction from ' +
+    test('should initialize routingRules properly during construction from ' +
         'array of plain objects', done => {
         const testWebsiteConfigParams = {
             routingRules: [],
@@ -129,21 +129,21 @@ describe('WebsiteConfiguration class', () => {
     });
 
     describe('Getter/setter methods', () => {
-        it('for indexDocument should get/set indexDocument property', done => {
+        test('for indexDocument should get/set indexDocument property', done => {
             const websiteConfig = new WebsiteConfiguration();
             websiteConfig.setIndexDocument('index.html');
             assert.strictEqual(websiteConfig.getIndexDocument(), 'index.html');
             done();
         });
 
-        it('for errorDocument should get/set errorDocument property', done => {
+        test('for errorDocument should get/set errorDocument property', done => {
             const websiteConfig = new WebsiteConfiguration();
             websiteConfig.setErrorDocument('error.html');
             assert.strictEqual(websiteConfig.getErrorDocument(), 'error.html');
             done();
         });
 
-        it('for redirectAllRequestsTo should get/set redirectAllRequestsTo ' +
+        test('for redirectAllRequestsTo should get/set redirectAllRequestsTo ' +
             'object', done => {
             const websiteConfig = new WebsiteConfiguration();
             const redirectAllRequestsTo = {
@@ -156,7 +156,7 @@ describe('WebsiteConfiguration class', () => {
             done();
         });
 
-        it('for routingRules should get/set routingRules', done => {
+        test('for routingRules should get/set routingRules', done => {
             const websiteConfig = new WebsiteConfiguration();
             const routingRules = [testRoutingRuleParams];
             websiteConfig.setRoutingRules(routingRules);
@@ -169,7 +169,7 @@ describe('WebsiteConfiguration class', () => {
         });
     });
 
-    it('addRoutingRule should add a RoutingRule to routingRules', done => {
+    test('addRoutingRule should add a RoutingRule to routingRules', done => {
         const websiteConfig = new WebsiteConfiguration();
         websiteConfig.addRoutingRule(testRoutingRuleParams);
         assert(Array.isArray(websiteConfig._routingRules));

@@ -99,12 +99,12 @@ describe('ObjectMD class setters/getters', () => {
             dataStoreVersionId: '',
         }],
         ['DataStoreName', null, ''],
-    ].forEach(test => {
-        const property = test[0];
-        const testValue = test[1];
-        const defaultValue = test[2];
+    ].forEach(t => {
+        const property = t[0];
+        const testValue = t[1];
+        const defaultValue = t[2];
         const testName = testValue === null ? 'get default' : 'get/set';
-        it(`${testName}: ${property}`, () => {
+        test(`${testName}: ${property}`, () => {
             if (testValue !== null) {
                 md[`set${property}`](testValue);
             }
@@ -120,7 +120,7 @@ describe('ObjectMD class setters/getters', () => {
         });
     });
 
-    it('ObjectMD::setReplicationSiteStatus', () => {
+    test('ObjectMD::setReplicationSiteStatus', () => {
         md.setReplicationInfo({
             backends: [{
                 site: 'zenko',
@@ -136,7 +136,7 @@ describe('ObjectMD class setters/getters', () => {
         }]);
     });
 
-    it('ObjectMD::setReplicationBackends', () => {
+    test('ObjectMD::setReplicationBackends', () => {
         md.setReplicationBackends([{
             site: 'a',
             status: 'b',
@@ -149,12 +149,12 @@ describe('ObjectMD class setters/getters', () => {
         }]);
     });
 
-    it('ObjectMD::setReplicationStorageClass', () => {
+    test('ObjectMD::setReplicationStorageClass', () => {
         md.setReplicationStorageClass('a');
         assert.strictEqual(md.getReplicationStorageClass(), 'a');
     });
 
-    it('ObjectMD::getReplicationSiteStatus', () => {
+    test('ObjectMD::getReplicationSiteStatus', () => {
         md.setReplicationInfo({
             backends: [{
                 site: 'zenko',
@@ -165,7 +165,7 @@ describe('ObjectMD class setters/getters', () => {
         assert.strictEqual(md.getReplicationSiteStatus('zenko'), 'PENDING');
     });
 
-    it('ObjectMD::setReplicationSiteDataStoreVersionId', () => {
+    test('ObjectMD::setReplicationSiteDataStoreVersionId', () => {
         md.setReplicationInfo({
             backends: [{
                 site: 'zenko',
@@ -181,7 +181,7 @@ describe('ObjectMD class setters/getters', () => {
         }]);
     });
 
-    it('ObjectMD::getReplicationSiteDataStoreVersionId', () => {
+    test('ObjectMD::getReplicationSiteDataStoreVersionId', () => {
         md.setReplicationInfo({
             backends: [{
                 site: 'zenko',
@@ -195,7 +195,7 @@ describe('ObjectMD class setters/getters', () => {
 });
 
 describe('ObjectMD import from stored blob', () => {
-    it('should export and import correctly the latest model version', () => {
+    test('should export and import correctly the latest model version', () => {
         const md = new ObjectMD();
         const jsonMd = md.getSerialized();
         const importedRes = ObjectMD.createFromBlob(jsonMd);
@@ -204,7 +204,7 @@ describe('ObjectMD import from stored blob', () => {
         assert.deepStrictEqual(md, importedMd);
     });
 
-    it('should convert old location to new location', () => {
+    test('should convert old location to new location', () => {
         const md = new ObjectMD();
         const value = md.getValue();
         value['md-model-version'] = 1;
@@ -220,7 +220,7 @@ describe('ObjectMD import from stored blob', () => {
             [{ key: 'stringLocation' }]);
     });
 
-    it('should keep null location as is', () => {
+    test('should keep null location as is', () => {
         const md = new ObjectMD();
         const value = md.getValue();
         value.location = null;
@@ -234,7 +234,7 @@ describe('ObjectMD import from stored blob', () => {
         assert.deepStrictEqual(importedMd.getValue().location, null);
     });
 
-    it('should add dataStoreName attribute if missing', () => {
+    test('should add dataStoreName attribute if missing', () => {
         const md = new ObjectMD();
         const value = md.getValue();
         value['md-model-version'] = 2;
@@ -249,7 +249,8 @@ describe('ObjectMD import from stored blob', () => {
         assert.notStrictEqual(valueImported.dataStoreName, undefined);
     });
 
-    it('should return undefined for dataStoreVersionId if no object location',
+    test(
+        'should return undefined for dataStoreVersionId if no object location',
         () => {
             const md = new ObjectMD();
             const value = md.getValue();
@@ -258,9 +259,10 @@ describe('ObjectMD import from stored blob', () => {
             assert.strictEqual(importedRes.error, undefined);
             const importedMd = importedRes.result;
             assert.strictEqual(importedMd.getDataStoreVersionId(), undefined);
-        });
+        }
+    );
 
-    it('should get dataStoreVersionId if saved in object location', () => {
+    test('should get dataStoreVersionId if saved in object location', () => {
         const md = new ObjectMD();
         const dummyLocation = {
             dataStoreVersionId: 'data-store-version-id',
@@ -275,7 +277,7 @@ describe('ObjectMD import from stored blob', () => {
             dummyLocation.dataStoreVersionId);
     });
 
-    it('should return an error if blob is malformed JSON', () => {
+    test('should return an error if blob is malformed JSON', () => {
         const importedRes = ObjectMD.createFromBlob('{BAD JSON}');
         assert.notStrictEqual(importedRes.error, undefined);
         assert.strictEqual(importedRes.result, undefined);
@@ -283,7 +285,7 @@ describe('ObjectMD import from stored blob', () => {
 });
 
 describe('getAttributes static method', () => {
-    it('should return object metadata attributes', () => {
+    test('should return object metadata attributes', () => {
         const attributes = ObjectMD.getAttributes();
         const expectedResult = {
             'owner-display-name': true,

@@ -22,7 +22,7 @@ describe('round robin hosts', () => {
             defaultPort: 1002 });
     });
 
-    it('should pick all hosts in turn', () => {
+    test('should pick all hosts in turn', () => {
         const hostsPickCount = {
             '1.2.3.0': 0,
             '1.2.3.1': 0,
@@ -39,7 +39,7 @@ describe('round robin hosts', () => {
         assert.strictEqual(hostsPickCount['1.2.3.2'], 30);
     });
 
-    it('should pick the same current host up to stickyCount ' +
+    test('should pick the same current host up to stickyCount ' +
         'with pickHost()', () => {
         const hostsPickCount = {
             '1.2.3.0': 0,
@@ -56,7 +56,7 @@ describe('round robin hosts', () => {
         assert.strictEqual(hostsPickCount[curHost.host], 10);
     });
 
-    it('should pick each host in turn with pickNextHost()', () => {
+    test('should pick each host in turn with pickNextHost()', () => {
         const hostsPickCount = {
             '1.2.3.0': 0,
             '1.2.3.1': 0,
@@ -73,7 +73,7 @@ describe('round robin hosts', () => {
         assert.strictEqual(hostsPickCount['1.2.3.2'], 3);
     });
 
-    it('should refuse if no valid host/port is given', () => {
+    test('should refuse if no valid host/port is given', () => {
         assert.throws(() => new RoundRobin([]), Error);
         assert.throws(() => new RoundRobin([{}]), Error);
         assert.throws(() => new RoundRobin([
@@ -100,7 +100,7 @@ describe('round robin hosts', () => {
         new RoundRobin(['zenko.io', 'zenka.ia']);
     });
 
-    it('should have set default port if not in bootstrap list', () => {
+    test('should have set default port if not in bootstrap list', () => {
         // the current host should be picked 10 times in a row
         const portMap = {
             '1.2.3.0': 1000,

@@ -73,23 +73,22 @@ describe('network.Server: ', () => {
         key: httpsRef.key,
         ca: httpsRef.ca,
     }],
-].forEach(test => {
+].forEach(t => {
     function createServer() {
         const ws = new Server(3000, log);
-        ws.setHttps(test[1].cert, test[1].key, test[1].ca, false);
+        ws.setHttps(t[1].cert, t[1].key, t[1].ca, false);
         return ws;
     }
 
-    describe(test[0], () => {
-        it('should start', done => {
+    describe(t[0], () => {
+        test('should start', done => {
             const ws = createServer().onError(done).onListening(() => {
                 ws.onStop(done);
                 ws.stop();
             }).start();
         });
 
-        it('should return EADDRINUSE on binding port already taken',
-        done => {
+        test('should return EADDRINUSE on binding port already taken', done => {
             const ws = createServer().onError(done)
                 .onListening(() => {
                     const bindingTimeout = setTimeout(() => {
@@ -109,7 +108,7 @@ describe('network.Server: ', () => {
             }).start();
         });
 
-        it('should return InternalError when no request handler', done => {
+        test('should return InternalError when no request handler', done => {
             const ws = createServer().onError(done).onListening(() => {
                 const requestTimeout = setTimeout(() => {
                     ws.onStop(() => done('No response received')).stop();
@@ -130,7 +129,7 @@ describe('network.Server: ', () => {
             }).start();
         });
 
-        it('should return 200 OK with "done" as content', done => {
+        test('should return 200 OK with "done" as content', done => {
             const ws = createServer().onError(done).onListening(() => {
                 const requestTimeout = setTimeout(() => {
                     ws.onStop(() => done('No response received')).stop();
@@ -155,7 +154,7 @@ describe('network.Server: ', () => {
         });
     });
 
-    it('should fail when the server is twoWay', done => {
+    test('should fail when the server is twoWay', done => {
         const ws = new Server(3000, log);
         ws.setHttps(httpsRef.cert, httpsRef.key, httpsRef.ca, true);
         ws.onError(done).onListening(() => {

@@ -26,8 +26,7 @@ describe('parseRangeSpec function', () => {
     ].forEach(testCase => {
         const { rangeHeader, expectedRangeSpec } = testCase;
 
-        it(`should return ${expectedRangeSpec} on range "${rangeHeader}"`,
-        () => {
+        test(`should return ${expectedRangeSpec} on range "${rangeHeader}"`, () => {
             const rangeSpec = parseRangeSpec(rangeHeader);
             if (expectedRangeSpec.error) {
                 assert(rangeSpec.error);
@@ -67,7 +66,7 @@ describe('getByteRangeFromSpec function', () => {
     ].forEach(testCase => {
         const { rangeSpec, objectSize, expectedByteRange } = testCase;
 
-        it(`should transform spec ${rangeSpec} with object size ` +
+        test(`should transform spec ${rangeSpec} with object size ` +
             `${objectSize} to byte range ${expectedByteRange}`, () => {
             const byteRange = getByteRangeFromSpec(rangeSpec, objectSize);
             if (expectedByteRange.error) {
@@ -84,58 +83,58 @@ describe('getByteRangeFromSpec function', () => {
 });
 
 describe('parseRange function', () => {
-    it('should return an object with the start and end if range is '
+    test('should return an object with the start and end if range is '
         + 'valid', () => {
         checkParseRange('bytes=0-9', 10, [0, 9]);
     });
 
-    it('should set the end of the range at the total object length minus 1 ' +
+    test('should set the end of the range at the total object length minus 1 ' +
         'if the provided end of range goes beyond the end of the object ' +
         'length', () => {
         checkParseRange('bytes=0-9', 8, [0, 7]);
     });
 
-    it('should handle incomplete range specifier where only end offset is ' +
+    test('should handle incomplete range specifier where only end offset is ' +
         'provided', () => {
         checkParseRange('bytes=-500', 10000, [9500, 9999]);
     });
 
-    it('should handle incomplete range specifier where only start ' +
+    test('should handle incomplete range specifier where only start ' +
         'provided', () => {
         checkParseRange('bytes=9500-', 10000, [9500, 9999]);
    });
 
-    it('should return undefined for the range if the range header ' +
+    test('should return undefined for the range if the range header ' +
        'format is invalid: missing equal', () => {
        checkParseRange('bytes0-9', 10);
    });
 
-    it('should return undefined for the range if the range header ' +
+    test('should return undefined for the range if the range header ' +
        'format is invalid: missing dash', () => {
        checkParseRange('bytes=09', 10);
    });
 
-    it('should return undefined for the range if the range header ' +
+    test('should return undefined for the range if the range header ' +
        'format is invalid: value invalid character', () => {
        checkParseRange('bytes=%-4', 10);
    });
 
-    it('should return undefined for the range if the range header ' +
+    test('should return undefined for the range if the range header ' +
        'format is invalid: value not int', () => {
        checkParseRange('bytes=4-a', 10);
    });
 
-    it('should return undefined for the range if the range header ' +
+    test('should return undefined for the range if the range header ' +
        'format is invalid: start > end', () => {
        checkParseRange('bytes=5-4', 10);
    });
 
-    it('should return undefined for the range if the range header ' +
+    test('should return undefined for the range if the range header ' +
        'format is invalid: negative start bound', () => {
        checkParseRange('bytes=-2-5', 10);
    });
 
-    it('should return InvalidRange if the range of the resource ' +
+    test('should return InvalidRange if the range of the resource ' +
        'does not cover the byte range', () => {
        const rangeHeader = 'bytes=10-30';
        const totalLength = 10;
@@ -143,50 +142,49 @@ describe('parseRange function', () => {
         assert.strictEqual(error.code, 416);
         assert.strictEqual(range, undefined);
     });
-    it('should return undefined for "bytes=-" request (invalid syntax) ',
-    () => {
+    test('should return undefined for "bytes=-" request (invalid syntax) ', () => {
         checkParseRange('bytes=-', 10);
     });
-    it('should return undefined for "bytes=-" request (invalid syntax, ' +
+    test('should return undefined for "bytes=-" request (invalid syntax, ' +
         'empty object)', () => {
         checkParseRange('bytes=-', 0);
     });
-    it('should return undefined for "bytes=10-9" request (invalid syntax, ' +
+    test('should return undefined for "bytes=10-9" request (invalid syntax, ' +
         'empty object)', () => {
         checkParseRange('bytes=10-9', 0);
     });
-    it('should return InvalidRange on 0-byte suffix range request', () => {
+    test('should return InvalidRange on 0-byte suffix range request', () => {
         const rangeHeader = 'bytes=-0';
         const { range, error } = parseRange(rangeHeader, 10);
         assert.strictEqual(error.code, 416);
         assert.strictEqual(range, undefined);
     });
-    it('should return InvalidRange on 0-byte suffix range request ' +
+    test('should return InvalidRange on 0-byte suffix range request ' +
         '(empty object)', () => {
         const rangeHeader = 'bytes=-0';
         const { range, error } = parseRange(rangeHeader, 0);
         assert.strictEqual(error.code, 416);
         assert.strictEqual(range, undefined);
     });
-    it('should return undefined on suffix range request on empty ' +
+    test('should return undefined on suffix range request on empty ' +
         'object', () => {
         checkParseRange('bytes=-10', 0);
     });
-    it('should return InvalidRange on empty object when only start==0 ' +
+    test('should return InvalidRange on empty object when only start==0 ' +
         'provided', () => {
         const rangeHeader = 'bytes=0-';
         const { range, error } = parseRange(rangeHeader, 0);
         assert.strictEqual(error.code, 416);
         assert.strictEqual(range, undefined);
     });
-    it('should return InvalidRange on empty object when only start!=0 ' +
+    test('should return InvalidRange on empty object when only start!=0 ' +
        'provided', () => {
        const rangeHeader = 'bytes=10-';
        const { range, error } = parseRange(rangeHeader, 0);
        assert.strictEqual(error.code, 416);
        assert.strictEqual(range, undefined);
    });
-    it('should return InvalidRange on empty object when start and end ' +
+    test('should return InvalidRange on empty object when start and end ' +
        'are provided', () => {
        const rangeHeader = 'bytes=10-30';
        const { range, error } = parseRange(rangeHeader, 0);

@@ -53,17 +53,17 @@ describe('REST interface for blob data storage', () => {
         });
     }
 
-    before(done => {
+    beforeAll(done => {
         setup(done);
     });
 
-    after(done => {
+    afterAll(done => {
         server.stop();
         done();
     });
 
     describe('simple tests', () => {
-        it('should be able to PUT, GET and DELETE an object', done => {
+        test('should be able to PUT, GET and DELETE an object', done => {
             const contents = 'This is the contents of the new object';
             let objKey;
 
@@ -133,7 +133,7 @@ describe('REST interface for blob data storage', () => {
                 `bytes ${expectedStart}-${expectedEnd}/${contents.length}`);
         }
 
-        before(done => {
+        beforeAll(done => {
             const rs = createReadStream(contents);
             client.put(rs, contents.length, '1', (err, key) => {
                 assert.ifError(err);
@@ -164,10 +164,9 @@ describe('REST interface for blob data storage', () => {
         { range: [contents.length - 1, undefined],
           sliceArgs: [-1], contentRange: [contents.length - 1,
                                           contents.length - 1] }]
-        .forEach((test, i) => {
-            const { range, sliceArgs, contentRange } = test;
-            it(`should get the correct range ${range[0]}-${range[1]}`,
-            done => {
+        .forEach((t, i) => {
+            const { range, sliceArgs, contentRange } = t;
+            test(`should get the correct range ${range[0]}-${range[1]}`, done => {
                 client.get(
                     objKey, range,
                     (1000 + i).toString(), (err, resp) => {
@@ -190,11 +189,10 @@ describe('REST interface for blob data storage', () => {
         { range: [0, undefined], emptyObject: true },
         { range: [0, 10], emptyObject: true },
         { range: [undefined, 0], emptyObject: true }]
-        .forEach((test, i) => {
-            const { range, emptyObject } = test;
-            it(`should get error 416 on ${range[0]}-${range[1]}` +
-                `${emptyObject ? ' (empty object)' : ''}`,
-            done => {
+        .forEach((t, i) => {
+            const { range, emptyObject } = t;
+            test(`should get error 416 on ${range[0]}-${range[1]}` +
+                `${emptyObject ? ' (empty object)' : ''}`, done => {
                 const key = (emptyObject ? emptyObjKey : objKey);
                 client.get(
                     key, range,
@@ -206,7 +204,7 @@ describe('REST interface for blob data storage', () => {
             });
         });
 
-        it('should get 200 OK on both range boundaries undefined', done => {
+        test('should get 200 OK on both range boundaries undefined', done => {
             client.get(objKey, [undefined, undefined], '3001', (err, resp) => {
                 assert.ifError(err);
                 const value = resp.read();
@@ -214,8 +212,7 @@ describe('REST interface for blob data storage', () => {
                 done();
             });
         });
-        it('should get 200 OK on range query "bytes=-10" of empty object',
-        done => {
+        test('should get 200 OK on range query "bytes=-10" of empty object', done => {
             client.get(emptyObjKey, [undefined, 10], '3002', (err, resp) => {
                 assert.ifError(err);
                 const value = resp.read();

@@ -161,7 +161,7 @@ describe('level-net - LevelDB over network', () => {
         async.series(opList, cb);
     }
 
-    before(done => {
+    beforeAll(done => {
         temp.mkdir('level-net-testdb-', (err, dbDir) => {
             const rootDb = level(dbDir);
             db = sublevel(rootDb);
@@ -169,7 +169,7 @@ describe('level-net - LevelDB over network', () => {
         });
     });
 
-    after(done => {
+    afterAll(done => {
         client.once('disconnect', () => {
             server.close();
             done();
@@ -178,17 +178,17 @@ describe('level-net - LevelDB over network', () => {
     });
 
     describe('simple tests', () => {
-        it('should be able to perform a complete CRUD test', done => {
+        test('should be able to perform a complete CRUD test', done => {
            doCRUDTest(client, 'CRUD', 'testkey', done);
        });
    });
    describe('sublevel tests', () => {
-        it('should be able to do CRUD on sublevel', done => {
+        test('should be able to do CRUD on sublevel', done => {
            const subLevel = client.openSub('sub1');
            assert(subLevel);
            doCRUDTest(subLevel, 'CRUD', 'subkey', done);
        });
-        it('should be able to re-open a sublevel', done => {
+        test('should be able to re-open a sublevel', done => {
            const subLevel = client.openSub('sub2');
            doCRUDTest(subLevel, 'C', 'subkey', err => {
                assert.ifError(err);
@@ -196,7 +196,7 @@ describe('level-net - LevelDB over network', () => {
                doCRUDTest(subLevel2, 'RD', 'subkey', done);
            });
        });
-        it('should separate sublevel namespaces correctly', done => {
+        test('should separate sublevel namespaces correctly', done => {
            const subLevel = client.openSub('sub3');
            doCRUDTest(subLevel, 'C', 'subkey', err => {
                assert.ifError(err);
@@ -208,7 +208,7 @@ describe('level-net - LevelDB over network', () => {
                });
            });
        });
-        it('should be able to nest multiple sub-levels', done => {
+        test('should be able to nest multiple sub-levels', done => {
            const subLevel = client.openSub('sub4');
            const nestedSub1 = subLevel.openSub('sub4-nested1');
            const nestedSub2 = nestedSub1.openSub('nested-nested');
@@ -249,10 +249,10 @@ describe('level-net - LevelDB over network', () => {
                    .put(keyOfIter(i), valueOfIter(i), params, putCb);
            }
        }
-        before(done => {
+        beforeAll(done => {
            prefillKeys(done);
        });
-        it('should be able to read keys back at random', done => {
+        test('should be able to read keys back at random', done => {
            const nbGet = 100;
            let nbGetDone = 0;
 
@@ -272,7 +272,7 @@ describe('level-net - LevelDB over network', () => {
                    .get(keyOfIter(randI), params, getCb);
            }
        });
-        it('should be able to list all keys through a stream and ' +
+        test('should be able to list all keys through a stream and ' +
            'rewrite them on-the-fly', done => {
            client.createReadStream((err, keyStream) => {
                assert.ifError(err);
@@ -311,7 +311,7 @@ describe('level-net - LevelDB over network', () => {
                });
            });
        });
-        it('should be able to abort key listing properly when client ' +
+        test('should be able to abort key listing properly when client ' +
            'destroys the stream', done => {
            client.createReadStream((err, keyStream) => {
                assert.ifError(err);
@@ -344,7 +344,7 @@ describe('level-net - LevelDB over network', () => {
                });
            });
        });
-        it('should delete all keys successfully', done => {
+        test('should delete all keys successfully', done => {
            let nbDelDone = 0;
 
            function checkAllDeleted(done) {

@ -53,11 +53,11 @@ describe('rpc - generic client/server RPC system', () => {
|
||||||
miscClient.connect(done);
|
miscClient.connect(done);
|
||||||
}
|
}
|
||||||
|
|
||||||
before(done => {
|
beforeAll(done => {
|
||||||
setupRPC(done);
|
setupRPC(done);
|
||||||
});
|
});
|
||||||
|
|
||||||
after(done => {
|
afterAll(done => {
|
||||||
miscClient.once('disconnect', () => {
|
miscClient.once('disconnect', () => {
|
||||||
server.close();
|
server.close();
|
||||||
done();
|
done();
|
||||||
|
@ -66,7 +66,7 @@ describe('rpc - generic client/server RPC system', () => {
|
||||||
});
|
});
|
||||||
|
|
||||||
describe('simple tests', () => {
|
describe('simple tests', () => {
|
||||||
it('should ping an RPC server (sync on server)', done => {
|
test('should ping an RPC server (sync on server)', done => {
|
||||||
miscClient.withRequestLogger(reqLogger).ping((err, args) => {
|
miscClient.withRequestLogger(reqLogger).ping((err, args) => {
|
||||||
if (err) {
|
if (err) {
|
||||||
return done(err);
|
return done(err);
|
||||||
|
@ -75,7 +75,7 @@ describe('rpc - generic client/server RPC system', () => {
|
||||||
return done();
|
return done();
|
||||||
});
|
});
|
||||||
});
|
});
|
||||||
it('should ping an RPC server (async on server)', done => {
|
test('should ping an RPC server (async on server)', done => {
|
||||||
miscClient.withRequestLogger(reqLogger).pingAsync((err, args) => {
|
miscClient.withRequestLogger(reqLogger).pingAsync((err, args) => {
|
||||||
if (err) {
|
if (err) {
|
||||||
return done(err);
|
return done(err);
|
||||||
|
@ -87,7 +87,7 @@ describe('rpc - generic client/server RPC system', () => {
|
||||||
});
|
});
|
||||||
|
|
||||||
describe('error tests', () => {
|
describe('error tests', () => {
|
||||||
it('should timeout if command is too long to respond', done => {
|
test('should timeout if command is too long to respond', done => {
|
||||||
// shorten the timeout to 200ms to speed up the test
|
// shorten the timeout to 200ms to speed up the test
|
||||||
const oldTimeout = miscClient.getCallTimeout();
|
const oldTimeout = miscClient.getCallTimeout();
|
||||||
miscClient.setCallTimeout(200);
|
miscClient.setCallTimeout(200);
|
||||||
|
@ -98,7 +98,7 @@ describe('rpc - generic client/server RPC system', () => {
|
||||||
});
|
});
|
||||||
miscClient.setCallTimeout(oldTimeout);
|
miscClient.setCallTimeout(oldTimeout);
|
||||||
});
|
});
|
||||||
it('should throw if last argument of call is not a callback', done => {
|
test('should throw if last argument of call is not a callback', done => {
|
||||||
assert.throws(() => {
|
assert.throws(() => {
|
||||||
miscClient.withRequestLogger(reqLogger).pingAsync(
|
miscClient.withRequestLogger(reqLogger).pingAsync(
|
||||||
'not a callback');
|
'not a callback');
|
||||||
|
|
|
@ -48,7 +48,7 @@ beforeEach(() => {
|
||||||
});
|
});
|
||||||
|
|
||||||
describe('Policies validation - Invalid JSON', () => {
|
describe('Policies validation - Invalid JSON', () => {
|
||||||
it('should return error for invalid JSON', () => {
|
test('should return error for invalid JSON', () => {
|
||||||
const result = validateUserPolicy('{"Version":"2012-10-17",' +
|
const result = validateUserPolicy('{"Version":"2012-10-17",' +
|
||||||
'"Statement":{"Effect":"Allow""Action":"s3:PutObject",' +
|
'"Statement":{"Effect":"Allow""Action":"s3:PutObject",' +
|
||||||
'"Resource":"arn:aws:s3*"}}');
|
'"Resource":"arn:aws:s3*"}}');
|
||||||
|
@ -57,16 +57,16 @@ describe('Policies validation - Invalid JSON', () => {
|
||||||
});
|
});
|
||||||
|
|
||||||
describe('Policies validation - Version', () => {
|
describe('Policies validation - Version', () => {
|
||||||
it('should validate with version date 2012-10-17', () => {
|
test('should validate with version date 2012-10-17', () => {
|
||||||
check(policy, successRes);
|
check(policy, successRes);
|
||||||
});
|
});
|
||||||
|
|
||||||
it('should return error for other dates', () => {
|
test('should return error for other dates', () => {
|
||||||
policy.Version = '2012-11-17';
|
policy.Version = '2012-11-17';
|
||||||
check(policy, failRes());
|
check(policy, failRes());
|
||||||
});
|
});
|
||||||
|
|
||||||
it('should return error if Version field is missing', () => {
|
test('should return error if Version field is missing', () => {
|
||||||
policy.Version = undefined;
|
policy.Version = undefined;
|
||||||
check(policy, failRes(errDict.required.Version));
|
check(policy, failRes(errDict.required.Version));
|
||||||
});
|
});
|
||||||
|
@ -144,15 +144,15 @@ describe('Policies validation - Principal', () => {
|
||||||
name: 'with backbeat service',
|
name: 'with backbeat service',
|
||||||
value: { Service: 'backbeat' },
|
value: { Service: 'backbeat' },
|
||||||
},
|
},
|
||||||
].forEach(test => {
|
].forEach(t => {
|
||||||
it(`should allow principal field with ${test.name}`, () => {
|
test(`should allow principal field with ${t.name}`, () => {
|
||||||
policy.Statement.Principal = test.value;
|
policy.Statement.Principal = t.value;
|
||||||
delete policy.Statement.Resource;
|
delete policy.Statement.Resource;
|
||||||
check(policy, successRes);
|
check(policy, successRes);
|
||||||
});
|
});
|
||||||
|
|
||||||
it(`should allow notPrincipal field with ${test.name}`, () => {
|
test(`should allow notPrincipal field with ${t.name}`, () => {
|
||||||
policy.Statement.NotPrincipal = test.value;
|
policy.Statement.NotPrincipal = t.value;
|
||||||
delete policy.Statement.Resource;
|
delete policy.Statement.Resource;
|
||||||
check(policy, successRes);
|
check(policy, successRes);
|
||||||
});
|
});
|
||||||
|
@ -244,26 +244,26 @@ describe('Policies validation - Principal', () => {
|
||||||
name: 'with other service than backbeat',
|
name: 'with other service than backbeat',
|
||||||
value: { Service: 'non-existent-service' },
|
value: { Service: 'non-existent-service' },
|
||||||
},
|
},
|
||||||
].forEach(test => {
|
].forEach(t => {
|
||||||
it(`should fail with ${test.name}`, () => {
|
test(`should fail with ${t.name}`, () => {
|
||||||
policy.Statement.Principal = test.value;
|
policy.Statement.Principal = t.value;
|
||||||
delete policy.Statement.Resource;
|
delete policy.Statement.Resource;
|
||||||
check(policy, failRes());
|
check(policy, failRes());
|
||||||
});
|
});
|
||||||
});
|
});
|
||||||
|
|
||||||
it('should not allow Resource field', () => {
|
test('should not allow Resource field', () => {
|
||||||
policy.Statement.Principal = '*';
|
policy.Statement.Principal = '*';
|
||||||
check(policy, failRes());
|
check(policy, failRes());
|
||||||
});
|
});
|
||||||
});
|
});
|
||||||
|
|
||||||
describe('Policies validation - Statement', () => {
|
describe('Policies validation - Statement', () => {
|
||||||
it('should succeed for a valid object', () => {
|
test('should succeed for a valid object', () => {
|
||||||
check(policy, successRes);
|
check(policy, successRes);
|
||||||
});
|
});
|
||||||
|
|
||||||
it('should succeed for a valid array', () => {
|
test('should succeed for a valid array', () => {
|
||||||
policy.Statement = [
|
policy.Statement = [
|
||||||
{
|
{
|
||||||
Effect: 'Allow',
|
Effect: 'Allow',
|
||||||
|
@ -279,43 +279,43 @@ describe('Policies validation - Statement', () => {
|
||||||
check(policy, successRes);
|
check(policy, successRes);
|
||||||
});
|
});
|
||||||
|
|
||||||
it('should return an error for undefined', () => {
|
test('should return an error for undefined', () => {
|
||||||
policy.Statement = undefined;
|
policy.Statement = undefined;
|
||||||
check(policy, failRes());
|
check(policy, failRes());
|
||||||
});
|
});
|
||||||
|
|
||||||
it('should return an error for an empty list', () => {
|
test('should return an error for an empty list', () => {
|
||||||
policy.Statement = [];
|
policy.Statement = [];
|
||||||
check(policy, failRes());
|
check(policy, failRes());
|
||||||
});
|
});
|
||||||
|
|
||||||
it('should return an error for an empty object', () => {
|
test('should return an error for an empty object', () => {
|
||||||
policy.Statement = {};
|
policy.Statement = {};
|
||||||
check(policy, failRes(errDict.required.Action));
|
check(policy, failRes(errDict.required.Action));
|
||||||
});
|
});
|
||||||
|
|
||||||
it('should return an error for missing a required field - Action', () => {
|
test('should return an error for missing a required field - Action', () => {
|
||||||
delete policy.Statement.Action;
|
delete policy.Statement.Action;
|
||||||
check(policy, failRes(errDict.required.Action));
|
check(policy, failRes(errDict.required.Action));
|
||||||
});
|
});
|
||||||
|
|
||||||
it('should return an error for missing a required field - Effect', () => {
|
test('should return an error for missing a required field - Effect', () => {
|
||||||
delete policy.Statement.Effect;
|
delete policy.Statement.Effect;
|
||||||
check(policy, failRes());
|
check(policy, failRes());
|
||||||
});
|
});
|
||||||
|
|
||||||
it('should return an error for missing a required field - Resource', () => {
|
test('should return an error for missing a required field - Resource', () => {
|
||||||
delete policy.Statement.Resource;
|
delete policy.Statement.Resource;
|
||||||
check(policy, failRes());
|
check(policy, failRes());
|
||||||
});
|
});
|
||||||
|
|
||||||
it('should return an error for missing multiple required fields', () => {
|
test('should return an error for missing multiple required fields', () => {
|
||||||
delete policy.Statement.Effect;
|
delete policy.Statement.Effect;
|
||||||
delete policy.Statement.Resource;
|
delete policy.Statement.Resource;
|
||||||
check(policy, failRes());
|
check(policy, failRes());
|
||||||
});
|
});
|
||||||
|
|
||||||
it('should succeed with optional fields missing - Sid, Condition', () => {
|
test('should succeed with optional fields missing - Sid, Condition', () => {
|
||||||
delete policy.Statement.Sid;
|
delete policy.Statement.Sid;
|
||||||
delete policy.Statement.Condition;
|
delete policy.Statement.Condition;
|
||||||
check(policy, successRes);
|
check(policy, successRes);
|
||||||
|
@ -323,37 +323,37 @@ describe('Policies validation - Statement', () => {
|
||||||
});
|
});
|
||||||
|
|
||||||
describe('Policies validation - Statement::Sid_block', () => {
|
describe('Policies validation - Statement::Sid_block', () => {
|
||||||
it('should succeed if Sid is any alphanumeric string', () => {
|
test('should succeed if Sid is any alphanumeric string', () => {
|
||||||
check(policy, successRes);
|
check(policy, successRes);
|
||||||
});
|
});
|
||||||
|
|
||||||
it('should fail if Sid is not a valid format', () => {
|
test('should fail if Sid is not a valid format', () => {
|
||||||
policy.Statement.Sid = 'foo bar()';
|
policy.Statement.Sid = 'foo bar()';
|
||||||
check(policy, failRes());
|
check(policy, failRes());
|
||||||
});
|
});
|
||||||
|
|
||||||
it('should fail if Sid is not a string', () => {
|
test('should fail if Sid is not a string', () => {
|
||||||
policy.Statement.Sid = 1234;
|
policy.Statement.Sid = 1234;
|
||||||
check(policy, failRes());
|
check(policy, failRes());
|
||||||
});
|
});
|
||||||
});
|
});
|
||||||
|
|
||||||
describe('Policies validation - Statement::Effect_block', () => {
|
describe('Policies validation - Statement::Effect_block', () => {
|
||||||
it('should succeed for Allow', () => {
|
test('should succeed for Allow', () => {
|
||||||
check(policy, successRes);
|
check(policy, successRes);
|
||||||
});
|
});
|
||||||
|
|
||||||
it('should succeed for Deny', () => {
|
test('should succeed for Deny', () => {
|
||||||
policy.Statement.Effect = 'Deny';
|
policy.Statement.Effect = 'Deny';
|
||||||
check(policy, successRes);
|
check(policy, successRes);
|
||||||
});
|
});
|
||||||
|
|
||||||
it('should fail for strings other than Allow/Deny', () => {
|
test('should fail for strings other than Allow/Deny', () => {
|
||||||
policy.Statement.Effect = 'Reject';
|
policy.Statement.Effect = 'Reject';
|
||||||
check(policy, failRes());
|
check(policy, failRes());
|
||||||
});
|
});
|
||||||
|
|
||||||
it('should fail if Effect is not a string', () => {
|
test('should fail if Effect is not a string', () => {
|
||||||
policy.Statement.Effect = 1;
|
policy.Statement.Effect = 1;
|
||||||
check(policy, failRes());
|
check(policy, failRes());
|
||||||
});
|
});
|
||||||
|
@ -366,7 +366,7 @@ describe('Policies validation - Statement::Action_block/' +
|
||||||
policy.Statement.NotAction = undefined;
|
policy.Statement.NotAction = undefined;
|
||||||
});
|
});
|
||||||
|
|
||||||
it('should succeed for foo:bar', () => {
|
test('should succeed for foo:bar', () => {
|
||||||
policy.Statement.Action = 'foo:bar';
|
policy.Statement.Action = 'foo:bar';
|
||||||
check(policy, successRes);
|
check(policy, successRes);
|
||||||
|
|
||||||
|
@ -375,7 +375,7 @@ describe('Policies validation - Statement::Action_block/' +
|
||||||
check(policy, successRes);
|
check(policy, successRes);
|
||||||
});
|
});
|
||||||
|
|
||||||
it('should succeed for foo:*', () => {
|
test('should succeed for foo:*', () => {
|
||||||
policy.Statement.Action = 'foo:*';
|
policy.Statement.Action = 'foo:*';
|
||||||
check(policy, successRes);
|
check(policy, successRes);
|
||||||
|
|
||||||
|
@ -384,7 +384,7 @@ describe('Policies validation - Statement::Action_block/' +
|
||||||
check(policy, successRes);
|
check(policy, successRes);
|
||||||
});
|
});
|
||||||
|
|
||||||
it('should succeed for *', () => {
|
test('should succeed for *', () => {
|
||||||
policy.Statement.Action = '*';
|
policy.Statement.Action = '*';
|
||||||
check(policy, successRes);
|
check(policy, successRes);
|
||||||
|
|
||||||
|
@ -393,7 +393,7 @@ describe('Policies validation - Statement::Action_block/' +
|
||||||
check(policy, successRes);
|
check(policy, successRes);
|
||||||
});
|
});
|
||||||
|
|
||||||
it('should fail for **', () => {
|
test('should fail for **', () => {
|
||||||
policy.Statement.Action = '**';
|
policy.Statement.Action = '**';
|
||||||
check(policy, failRes(errDict.pattern.Action));
|
check(policy, failRes(errDict.pattern.Action));
|
||||||
|
|
||||||
|
@ -402,7 +402,7 @@ describe('Policies validation - Statement::Action_block/' +
|
||||||
check(policy, failRes(errDict.pattern.Action));
|
check(policy, failRes(errDict.pattern.Action));
|
||||||
});
|
});
|
||||||
|
|
||||||
it('should fail for foobar', () => {
|
test('should fail for foobar', () => {
|
||||||
policy.Statement.Action = 'foobar';
|
policy.Statement.Action = 'foobar';
|
||||||
check(policy, failRes(errDict.pattern.Action));
|
check(policy, failRes(errDict.pattern.Action));
|
||||||
|
|
||||||
|
@ -419,7 +419,7 @@ describe('Policies validation - Statement::Resource_block' +
|
||||||
policy.Statement.NotResource = undefined;
|
policy.Statement.NotResource = undefined;
|
||||||
});
|
});
|
||||||
|
|
||||||
it('should succeed for arn:aws:s3:::*', () => {
|
test('should succeed for arn:aws:s3:::*', () => {
|
||||||
policy.Statement.Resource = 'arn:aws:s3:::*';
|
policy.Statement.Resource = 'arn:aws:s3:::*';
|
||||||
check(policy, successRes);
|
check(policy, successRes);
|
||||||
|
|
||||||
|
@ -428,7 +428,7 @@ describe('Policies validation - Statement::Resource_block' +
|
||||||
check(policy, successRes);
|
check(policy, successRes);
|
||||||
});
|
});
|
||||||
|
|
||||||
it('should succeed for arn:aws:s3:::test/home/${aws:username}', () => {
|
test('should succeed for arn:aws:s3:::test/home/${aws:username}', () => {
|
||||||
policy.Statement.Resource = 'arn:aws:s3:::test/home/${aws:username}';
|
policy.Statement.Resource = 'arn:aws:s3:::test/home/${aws:username}';
|
||||||
check(policy, successRes);
|
check(policy, successRes);
|
||||||
|
|
||||||
|
@ -437,7 +437,7 @@ describe('Policies validation - Statement::Resource_block' +
|
||||||
check(policy, successRes);
|
check(policy, successRes);
|
||||||
});
|
});
|
||||||
|
|
||||||
it('should succeed for arn:aws:ec2:us-west-1:1234567890:vol/*', () => {
|
test('should succeed for arn:aws:ec2:us-west-1:1234567890:vol/*', () => {
|
||||||
policy.Statement.Resource = 'arn:aws:ec2:us-west-1:1234567890:vol/*';
|
policy.Statement.Resource = 'arn:aws:ec2:us-west-1:1234567890:vol/*';
|
||||||
check(policy, successRes);
|
check(policy, successRes);
|
||||||
|
|
||||||
|
@ -446,7 +446,7 @@ describe('Policies validation - Statement::Resource_block' +
|
||||||
check(policy, successRes);
|
check(policy, successRes);
|
||||||
});
|
});
|
||||||
|
|
||||||
it('should succeed for *', () => {
|
test('should succeed for *', () => {
|
||||||
policy.Statement.Resource = '*';
|
policy.Statement.Resource = '*';
|
||||||
check(policy, successRes);
|
check(policy, successRes);
|
||||||
|
|
||||||
|
@ -455,7 +455,7 @@ describe('Policies validation - Statement::Resource_block' +
|
||||||
check(policy, successRes);
|
check(policy, successRes);
|
||||||
});
|
});
|
||||||
|
|
||||||
it('should fail for arn:aws:ec2:us-west-1:vol/* - missing region', () => {
|
test('should fail for arn:aws:ec2:us-west-1:vol/* - missing region', () => {
|
||||||
policy.Statement.Resource = 'arn:aws:ec2:1234567890:vol/*';
|
policy.Statement.Resource = 'arn:aws:ec2:1234567890:vol/*';
|
||||||
check(policy, failRes(errDict.pattern.Resource));
|
check(policy, failRes(errDict.pattern.Resource));
|
||||||
|
|
||||||
|
@ -464,7 +464,7 @@ describe('Policies validation - Statement::Resource_block' +
|
||||||
check(policy, failRes(errDict.pattern.Resource));
|
check(policy, failRes(errDict.pattern.Resource));
|
||||||
});
|
});
|
||||||
|
|
||||||
it('should fail for arn:aws:ec2:us-west-1:123456789:v/${} - ${}', () => {
|
test('should fail for arn:aws:ec2:us-west-1:123456789:v/${} - ${}', () => {
|
||||||
policy.Statement.Resource = 'arn:aws:ec2:us-west-1:123456789:v/${}';
|
policy.Statement.Resource = 'arn:aws:ec2:us-west-1:123456789:v/${}';
|
||||||
check(policy, failRes(errDict.pattern.Resource));
|
check(policy, failRes(errDict.pattern.Resource));
|
||||||
|
|
||||||
|
@ -473,7 +473,7 @@ describe('Policies validation - Statement::Resource_block' +
|
||||||
check(policy, failRes(errDict.pattern.Resource));
|
check(policy, failRes(errDict.pattern.Resource));
|
||||||
});
|
});
|
||||||
|
|
||||||
it('should fail for ec2:us-west-1:qwerty:vol/* - missing arn:aws:', () => {
|
test('should fail for ec2:us-west-1:qwerty:vol/* - missing arn:aws:', () => {
|
||||||
policy.Statement.Resource = 'ec2:us-west-1:123456789012:vol/*';
|
policy.Statement.Resource = 'ec2:us-west-1:123456789012:vol/*';
|
||||||
check(policy, failRes(errDict.pattern.Resource));
|
check(policy, failRes(errDict.pattern.Resource));
|
||||||
|
|
||||||
|
@ -482,18 +482,18 @@ describe('Policies validation - Statement::Resource_block' +
|
||||||
check(policy, failRes(errDict.pattern.Resource));
|
check(policy, failRes(errDict.pattern.Resource));
|
||||||
});
|
});
|
||||||
|
|
||||||
it('should fail for empty list of resources', () => {
|
test('should fail for empty list of resources', () => {
|
||||||
policy.Statement.Resource = [];
|
policy.Statement.Resource = [];
|
||||||
check(policy, failRes(errDict.minItems.Resource));
|
check(policy, failRes(errDict.minItems.Resource));
|
||||||
});
|
});
|
||||||
});
|
});
|
||||||
|
|
||||||
describe('Policies validation - Statement::Condition_block', () => {
|
describe('Policies validation - Statement::Condition_block', () => {
|
||||||
it('should succeed for single Condition', () => {
|
test('should succeed for single Condition', () => {
|
||||||
check(policy, successRes);
|
check(policy, successRes);
|
||||||
});
|
});
|
||||||
|
|
||||||
it('should succeed for multiple Conditions', () => {
|
test('should succeed for multiple Conditions', () => {
|
||||||
policy.Statement.Condition = {
|
policy.Statement.Condition = {
|
||||||
StringNotLike: { 's3:prefix': ['Development/*'] },
|
StringNotLike: { 's3:prefix': ['Development/*'] },
|
||||||
Null: { 's3:prefix': false },
|
Null: { 's3:prefix': false },
|
||||||
|
@ -501,19 +501,19 @@ describe('Policies validation - Statement::Condition_block', () => {
|
||||||
check(policy, successRes);
|
check(policy, successRes);
|
||||||
});
|
});
|
||||||
|
|
||||||
it('should fail when Condition is not an Object', () => {
|
test('should fail when Condition is not an Object', () => {
|
||||||
policy.Statement.Condition = 'NumericLessThanEquals';
|
policy.Statement.Condition = 'NumericLessThanEquals';
|
||||||
check(policy, failRes());
|
check(policy, failRes());
|
||||||
});
|
});
|
||||||
|
|
||||||
it('should fail for an invalid Condition', () => {
|
test('should fail for an invalid Condition', () => {
|
||||||
policy.Statement.Condition = {
|
policy.Statement.Condition = {
|
||||||
SomethingLike: { 's3:prefix': ['Development/*'] },
|
SomethingLike: { 's3:prefix': ['Development/*'] },
|
||||||
};
|
};
|
||||||
check(policy, failRes());
|
check(policy, failRes());
|
||||||
});
|
});
|
||||||
|
|
||||||
it('should fail when one of the multiple conditions is invalid', () => {
|
test('should fail when one of the multiple conditions is invalid', () => {
|
||||||
policy.Statement.Condition = {
|
policy.Statement.Condition = {
|
||||||
Null: { 's3:prefix': false },
|
Null: { 's3:prefix': false },
|
||||||
SomethingLike: { 's3:prefix': ['Development/*'] },
|
SomethingLike: { 's3:prefix': ['Development/*'] },
|
||||||
|
@ -521,7 +521,7 @@ describe('Policies validation - Statement::Condition_block', () => {
|
||||||
check(policy, failRes());
|
check(policy, failRes());
|
||||||
});
|
});
|
||||||
|
|
||||||
it('should fail when invalid property is assigned', () => {
|
test('should fail when invalid property is assigned', () => {
|
||||||
policy.Condition = {
|
policy.Condition = {
|
||||||
SomethingLike: { 's3:prefix': ['Development/*'] },
|
SomethingLike: { 's3:prefix': ['Development/*'] },
|
||||||
};
|
};
|
||||||
|
|
|
@ -289,10 +289,10 @@ describe('Principal evaluator', () => {
|
||||||
},
|
},
|
||||||
result: 'Deny',
|
result: 'Deny',
|
||||||
},
|
},
|
||||||
].forEach(test => {
|
].forEach(t => {
|
||||||
it(`_evaluatePrincipalField(): ${test.name}`, () => {
|
test(`_evaluatePrincipalField(): ${t.name}`, () => {
|
||||||
assert.strictEqual(Principal._evaluatePrincipalField(defaultParams,
|
assert.strictEqual(Principal._evaluatePrincipalField(defaultParams,
|
||||||
test.statement, test.valids), test.result);
|
t.statement, t.valids), t.result);
|
||||||
});
|
});
|
||||||
});
|
});
|
||||||
|
|
||||||
|
@ -348,17 +348,17 @@ describe('Principal evaluator', () => {
|
||||||
valids: defaultValids,
|
valids: defaultValids,
|
||||||
result: 'Deny',
|
result: 'Deny',
|
||||||
},
|
},
|
||||||
].forEach(test => {
|
].forEach(t => {
|
||||||
it(`_evaluatePrincipal(): ${test.name}`, () => {
|
test(`_evaluatePrincipal(): ${t.name}`, () => {
|
||||||
const params = {
|
const params = {
|
||||||
log: defaultParams.log,
|
log: defaultParams.log,
|
||||||
trustedPolicy: {
|
trustedPolicy: {
|
||||||
Statement: test.statement,
|
Statement: t.statement,
|
||||||
},
|
},
|
||||||
};
|
};
|
||||||
const valids = test.valids;
|
const valids = t.valids;
|
||||||
assert.strictEqual(Principal._evaluatePrincipal(params, valids),
|
assert.strictEqual(Principal._evaluatePrincipal(params, valids),
|
||||||
test.result);
|
t.result);
|
||||||
});
|
});
|
||||||
});
|
});
|
||||||
|
|
||||||
|
@ -621,27 +621,27 @@ describe('Principal evaluator', () => {
|
||||||
checkAction: true,
|
checkAction: true,
|
||||||
},
|
},
|
||||||
},
|
},
|
||||||
].forEach(test => {
|
].forEach(t => {
|
||||||
it(`evaluatePrincipal(): ${test.name}`, () => {
|
test(`evaluatePrincipal(): ${t.name}`, () => {
|
||||||
const rc = new RequestContext({}, {}, '', '', '127.0.0.1',
|
const rc = new RequestContext({}, {}, '', '', '127.0.0.1',
|
||||||
false, 'assumeRole', 'sts', null, {
|
false, 'assumeRole', 'sts', null, {
|
||||||
accountid: test.requester.accountId,
|
accountid: t.requester.accountId,
|
||||||
arn: test.requester.arn,
|
arn: t.requester.arn,
|
||||||
parentArn: test.requester.parentArn,
|
parentArn: t.requester.parentArn,
|
||||||
principalType: test.requester.userType,
|
principalType: t.requester.userType,
|
||||||
externalId: '4321',
|
externalId: '4321',
|
||||||
}, 'v4', 'V4');
|
}, 'v4', 'V4');
|
||||||
|
|
||||||
const params = {
|
const params = {
|
||||||
log: defaultParams.log,
|
log: defaultParams.log,
|
||||||
trustedPolicy: {
|
trustedPolicy: {
|
||||||
Statement: test.statement,
|
Statement: t.statement,
|
||||||
},
|
},
|
||||||
rc,
|
rc,
|
||||||
targetAccountId: test.target.accountId,
|
targetAccountId: t.target.accountId,
|
||||||
};
|
};
|
||||||
const result = Principal.evaluatePrincipal(params);
|
const result = Principal.evaluatePrincipal(params);
|
||||||
assert.deepStrictEqual(result, test.result);
|
assert.deepStrictEqual(result, t.result);
|
||||||
});
|
});
|
||||||
});
|
});
|
||||||
});
|
});
|
||||||
|
|
File diff suppressed because it is too large
|
@ -19,15 +19,15 @@ function testMD5(payload, expectedMD5, done) {
|
||||||
}
|
}
|
||||||
|
|
||||||
describe('utilities.MD5Sum', () => {
|
describe('utilities.MD5Sum', () => {
|
||||||
it('should work on empty request', done => {
|
test('should work on empty request', done => {
|
||||||
testMD5('', constants.emptyFileMd5, done);
|
testMD5('', constants.emptyFileMd5, done);
|
||||||
});
|
});
|
||||||
|
|
||||||
it('should work on SAY GRRRR!!! request', done => {
|
test('should work on SAY GRRRR!!! request', done => {
|
||||||
testMD5('SAY GRRRR!!!', '986eb4a201192e8b1723a42c1468fb4e', done);
|
testMD5('SAY GRRRR!!!', '986eb4a201192e8b1723a42c1468fb4e', done);
|
||||||
});
|
});
|
||||||
|
|
||||||
it('should work on multiple MiB data stream', done => {
|
test('should work on multiple MiB data stream', done => {
|
||||||
/*
|
/*
|
||||||
* relies on a zero filled buffer and
|
* relies on a zero filled buffer and
|
||||||
* split content in order to get multiple calls of _transform()
|
* split content in order to get multiple calls of _transform()
|
||||||
|
|
|
@ -38,7 +38,7 @@ describe('s3middleware SubStreamInterface.stopStreaming()', () => {
|
||||||
subStreamInterface._currentStream.on('finish', () => {
|
subStreamInterface._currentStream.on('finish', () => {
|
||||||
eventsEmitted.currentStreamEnded = eventSequence++;
|
eventsEmitted.currentStreamEnded = eventSequence++;
|
||||||
});
|
});
|
||||||
it('should stop streaming data and end current stream', done => {
|
test('should stop streaming data and end current stream', done => {
|
||||||
sourceStream.on('data', chunk => {
|
sourceStream.on('data', chunk => {
|
||||||
const currentLength = subStreamInterface.getLengthCounter();
|
const currentLength = subStreamInterface.getLengthCounter();
|
||||||
if (currentLength === 10) {
|
if (currentLength === 10) {
|
||||||
|
|
|
@ -49,19 +49,19 @@ const subPartInfoTests = [
|
||||||
];
|
];
|
||||||
|
|
||||||
describe('s3middleware Azure MPU helper utility function', () => {
|
describe('s3middleware Azure MPU helper utility function', () => {
|
||||||
padStringTests.forEach(test => {
|
padStringTests.forEach(t => {
|
||||||
it(`padString should pad a ${test.category}`, done => {
|
test(`padString should pad a ${t.category}`, done => {
|
||||||
const result = test.strings.map(str =>
|
const result = t.strings.map(str =>
|
||||||
padString(str, test.category));
|
padString(str, t.category));
|
||||||
assert.deepStrictEqual(result, test.expectedResults);
|
assert.deepStrictEqual(result, t.expectedResults);
|
||||||
done();
|
done();
|
||||||
});
|
});
|
||||||
});
|
});
|
||||||
|
|
||||||
subPartInfoTests.forEach(test => {
|
subPartInfoTests.forEach(t => {
|
||||||
const { desc, size, expectedNumberSubParts, expectedLastPartSize }
|
const { desc, size, expectedNumberSubParts, expectedLastPartSize }
|
||||||
= test;
|
= t;
|
||||||
it('getSubPartInfo should return correct result for ' +
|
test('getSubPartInfo should return correct result for ' +
|
||||||
`dataContentLength of ${desc}`, done => {
|
`dataContentLength of ${desc}`, done => {
|
||||||
const result = getSubPartInfo(size);
|
const result = getSubPartInfo(size);
|
||||||
const expectedLastPartIndex = expectedNumberSubParts - 1;
|
const expectedLastPartIndex = expectedNumberSubParts - 1;
|
||||||
|
|
|
@ -22,10 +22,10 @@ describe('s3middleware.NullStream', () => {
|
||||||
for (let i = 0; i < nullChunks.length; ++i) {
|
for (let i = 0; i < nullChunks.length; ++i) {
|
||||||
const size = nullChunks[i].size;
|
const size = nullChunks[i].size;
|
||||||
const md5sum = nullChunks[i].md5sum;
|
const md5sum = nullChunks[i].md5sum;
|
||||||
it(`should generate ${size} null bytes by size`, done => {
|
test(`should generate ${size} null bytes by size`, done => {
|
||||||
testNullChunk(size, null, md5sum, done);
|
testNullChunk(size, null, md5sum, done);
|
||||||
});
|
});
|
||||||
it(`should generate ${size} null bytes by range`, done => {
|
test(`should generate ${size} null bytes by range`, done => {
|
||||||
const dummyOffset = 9320954;
|
const dummyOffset = 9320954;
|
||||||
testNullChunk(0, [dummyOffset, dummyOffset + size - 1],
|
testNullChunk(0, [dummyOffset, dummyOffset + size - 1],
|
||||||
md5sum, done);
|
md5sum, done);
|
||||||
|
|
|
@ -8,14 +8,14 @@ const hexHash = 'd41d8cd98f00b204e9800998ecf8427e';
|
||||||
const base64Hash = '1B2M2Y8AsgTpgAmY7PhCfg==';
|
const base64Hash = '1B2M2Y8AsgTpgAmY7PhCfg==';
|
||||||
|
|
||||||
describe('s3middleware object utilities', () => {
|
describe('s3middleware object utilities', () => {
|
||||||
it('should convert hexadecimal MD5 to base 64', done => {
|
test('should convert hexadecimal MD5 to base 64', done => {
|
||||||
const hash = crypto.createHash('md5').digest('hex');
|
const hash = crypto.createHash('md5').digest('hex');
|
||||||
const convertedHash = objectUtils.getBase64MD5(hash);
|
const convertedHash = objectUtils.getBase64MD5(hash);
|
||||||
assert.strictEqual(convertedHash, base64Hash);
|
assert.strictEqual(convertedHash, base64Hash);
|
||||||
done();
|
done();
|
||||||
});
|
});
|
||||||
|
|
||||||
it('should convert base 64 MD5 to hexadecimal', done => {
|
test('should convert base 64 MD5 to hexadecimal', done => {
|
||||||
const hash = crypto.createHash('md5').digest('base64');
|
const hash = crypto.createHash('md5').digest('base64');
|
||||||
const convertedHash = objectUtils.getHexMD5(hash);
|
const convertedHash = objectUtils.getHexMD5(hash);
|
||||||
assert.strictEqual(convertedHash, hexHash);
|
assert.strictEqual(convertedHash, hexHash);
|
||||||
|
|
|
@ -117,7 +117,7 @@ function _addCases(dominantHeader, recessiveHeader) {
|
||||||
_addCases('if-none-match', 'if-modified-since');
|
_addCases('if-none-match', 'if-modified-since');
|
||||||
|
|
||||||
function checkSuccess(h) {
|
function checkSuccess(h) {
|
||||||
it('should succeed when value meets condition', () => {
|
test('should succeed when value meets condition', () => {
|
||||||
const headers = {};
|
const headers = {};
|
||||||
headers[h] = basicTestCases[h].success;
|
headers[h] = basicTestCases[h].success;
|
||||||
const result =
|
const result =
|
||||||
|
@ -127,7 +127,7 @@ function checkSuccess(h) {
|
||||||
}
|
}
|
||||||
|
|
||||||
function checkFailure(h) {
|
function checkFailure(h) {
|
||||||
it('should fail when value does not meet condition', () => {
|
test('should fail when value does not meet condition', () => {
|
||||||
const headers = {};
|
const headers = {};
|
||||||
headers[h] = basicTestCases[h].fail;
|
headers[h] = basicTestCases[h].fail;
|
||||||
const result =
|
const result =
|
||||||
|
@ -140,7 +140,7 @@ function checkFailure(h) {
|
||||||
|
|
||||||
function checkCaseResult(testCase) {
|
function checkCaseResult(testCase) {
|
||||||
const h = Object.keys(testCase.headers);
|
const h = Object.keys(testCase.headers);
|
||||||
it(`"${h[0]}" ${testCase.headers[h[0]]} and "${h[1]}" ` +
|
test(`"${h[0]}" ${testCase.headers[h[0]]} and "${h[1]}" ` +
|
||||||
`${testCase.headers[h[1]]}: should return ${testCase.result}`, () => {
|
`${testCase.headers[h[1]]}: should return ${testCase.result}`, () => {
|
||||||
const testHeaders = {};
|
const testHeaders = {};
|
||||||
h.forEach(key => {
|
h.forEach(key => {
|
||||||
|
@ -179,25 +179,26 @@ describe('_checkEtagMatch function :', () => {
|
||||||
};
|
};
|
||||||
const listOfValues = eTagMatchValues.map(item => item.value).join();
|
const listOfValues = eTagMatchValues.map(item => item.value).join();
|
||||||
eTagMatchValues.forEach(item => {
|
eTagMatchValues.forEach(item => {
|
||||||
it(`should return success for ${item.desc}`, () => {
|
test(`should return success for ${item.desc}`, () => {
|
||||||
const result = _checkEtagMatch(item.value, contentMD5);
|
const result = _checkEtagMatch(item.value, contentMD5);
|
||||||
assert.deepStrictEqual(result, expectedSuccess);
|
assert.deepStrictEqual(result, expectedSuccess);
|
||||||
});
|
});
|
||||||
});
|
});
|
||||||
|
|
||||||
it('should return success for multiple valid values', () => {
|
test('should return success for multiple valid values', () => {
|
||||||
const result = _checkEtagMatch(listOfValues, contentMD5);
|
const result = _checkEtagMatch(listOfValues, contentMD5);
|
||||||
assert.deepStrictEqual(result, expectedSuccess);
|
assert.deepStrictEqual(result, expectedSuccess);
|
||||||
});
|
});
|
||||||
|
|
||||||
it('should return success for multiple valid values with comma at index 0',
|
test(
|
||||||
|
'should return success for multiple valid values with comma at index 0',
|
||||||
() => {
|
() => {
|
||||||
const result = _checkEtagMatch(`,${listOfValues}`, contentMD5);
|
const result = _checkEtagMatch(`,${listOfValues}`, contentMD5);
|
||||||
assert.deepStrictEqual(result, expectedSuccess);
|
assert.deepStrictEqual(result, expectedSuccess);
|
||||||
});
|
}
|
||||||
|
);
|
||||||
|
|
||||||
it('should return success as long as one value in list is valid',
|
test('should return success as long as one value in list is valid', () => {
|
||||||
() => {
|
|
||||||
const result = _checkEtagMatch(`${listOfValues},aaa`, contentMD5);
|
const result = _checkEtagMatch(`${listOfValues},aaa`, contentMD5);
|
||||||
assert.deepStrictEqual(result, expectedSuccess);
|
assert.deepStrictEqual(result, expectedSuccess);
|
||||||
});
|
});
|
||||||
|
@ -206,9 +207,9 @@ describe('_checkEtagMatch function :', () => {
|
||||||
{ desc: 'if only value does not match', value: 'aaa' },
|
{ desc: 'if only value does not match', value: 'aaa' },
|
||||||
{ desc: 'for list of non-matching values', value: 'aaa,bbb,ccc' },
|
{ desc: 'for list of non-matching values', value: 'aaa,bbb,ccc' },
|
||||||
];
|
];
|
||||||
failTests.forEach(test => {
|
failTests.forEach(t => {
|
||||||
it(`should return PreconditionFailed ${test.desc}`, () => {
|
test(`should return PreconditionFailed ${t.desc}`, () => {
|
||||||
const result = _checkEtagMatch(test.value, contentMD5);
|
const result = _checkEtagMatch(t.value, contentMD5);
|
||||||
assert.deepStrictEqual(result.error, errors.PreconditionFailed);
|
assert.deepStrictEqual(result.error, errors.PreconditionFailed);
|
||||||
});
|
});
|
||||||
});
|
});
|
||||||
|
@ -216,26 +217,25 @@ describe('_checkEtagMatch function :', () => {
|
||||||
|
|
||||||
describe('_checkEtagNoneMatch function :', () => {
|
describe('_checkEtagNoneMatch function :', () => {
|
||||||
eTagMatchValues.forEach(item => {
|
eTagMatchValues.forEach(item => {
|
||||||
it(`should return NotModified for ${item.desc}`, () => {
|
test(`should return NotModified for ${item.desc}`, () => {
|
||||||
const result = _checkEtagNoneMatch(item.value, contentMD5);
|
const result = _checkEtagNoneMatch(item.value, contentMD5);
|
||||||
assert.deepStrictEqual(result.error, errors.NotModified);
|
assert.deepStrictEqual(result.error, errors.NotModified);
|
||||||
});
|
});
|
||||||
|
|
||||||
it(`should return NotModified if ${item.desc} is in a list of ` +
|
test(`should return NotModified if ${item.desc} is in a list of ` +
|
||||||
'otherwise non-matching values',
|
'otherwise non-matching values', () => {
|
||||||
() => {
|
|
||||||
const result = _checkEtagNoneMatch(`aaa,${item.value},bbb`,
|
const result = _checkEtagNoneMatch(`aaa,${item.value},bbb`,
|
||||||
contentMD5);
|
contentMD5);
|
||||||
assert.deepStrictEqual(result.error, errors.NotModified);
|
assert.deepStrictEqual(result.error, errors.NotModified);
|
||||||
});
|
});
|
||||||
});
|
});
|
||||||
|
|
||||||
it('should return success for multiple non-matching values', () => {
|
test('should return success for multiple non-matching values', () => {
|
||||||
const result = _checkEtagNoneMatch('aaa,bbb,ccc', contentMD5);
|
const result = _checkEtagNoneMatch('aaa,bbb,ccc', contentMD5);
|
||||||
assert.deepStrictEqual(result, expectedSuccess);
|
assert.deepStrictEqual(result, expectedSuccess);
|
||||||
});
|
});
|
||||||
|
|
||||||
it('should return success for multiple non-matching values ' +
|
test('should return success for multiple non-matching values ' +
|
||||||
'with comma at index 0', () => {
|
'with comma at index 0', () => {
|
||||||
const result = _checkEtagNoneMatch(',aaa,bbb,ccc', contentMD5);
|
const result = _checkEtagNoneMatch(',aaa,bbb,ccc', contentMD5);
|
||||||
assert.deepStrictEqual(result, expectedSuccess);
|
assert.deepStrictEqual(result, expectedSuccess);
|
||||||
|
@ -243,50 +243,57 @@ describe('_checkEtagNoneMatch function :', () => {
|
||||||
});
|
});
|
||||||
|
|
||||||
describe('_checkModifiedSince function :', () => {
|
describe('_checkModifiedSince function :', () => {
|
||||||
it('should return InvalidArgument if header has invalid value', () => {
|
test('should return InvalidArgument if header has invalid value', () => {
|
||||||
const result = _checkModifiedSince('aaaa', lastModified);
|
const result = _checkModifiedSince('aaaa', lastModified);
|
||||||
assert.deepStrictEqual(result.error, errors.InvalidArgument);
|
assert.deepStrictEqual(result.error, errors.InvalidArgument);
|
||||||
});
|
});
|
||||||
|
|
||||||
it('should return success if header value is earlier than last modified',
|
test(
|
||||||
|
'should return success if header value is earlier than last modified',
|
||||||
() => {
|
() => {
|
||||||
const result = _checkModifiedSince(beforeLastModified, lastModified);
|
const result = _checkModifiedSince(beforeLastModified, lastModified);
|
||||||
assert.deepStrictEqual(result, expectedSuccess);
|
assert.deepStrictEqual(result, expectedSuccess);
|
||||||
});
|
}
|
||||||
|
);
|
||||||
|
|
||||||
it('should return NotModified if header value is later than last modified',
|
test(
|
||||||
|
'should return NotModified if header value is later than last modified',
|
||||||
() => {
|
() => {
|
||||||
const result = _checkModifiedSince(afterLastModified, lastModified);
|
const result = _checkModifiedSince(afterLastModified, lastModified);
|
||||||
assert.deepStrictEqual(result.error, errors.NotModified);
|
assert.deepStrictEqual(result.error, errors.NotModified);
|
||||||
});
|
}
|
||||||
|
);
|
||||||
|
|
||||||
it('should return NotModified if header value is equal to last modified',
|
test(
|
||||||
|
'should return NotModified if header value is equal to last modified',
|
||||||
() => {
|
() => {
|
||||||
const result = _checkModifiedSince(lastModified, lastModified);
|
const result = _checkModifiedSince(lastModified, lastModified);
|
||||||
assert.deepStrictEqual(result.error, errors.NotModified);
|
assert.deepStrictEqual(result.error, errors.NotModified);
|
||||||
});
|
}
|
||||||
|
);
|
||||||
});
|
});
|
||||||
|
|
||||||
describe('_checkUnmodifiedSince function :', () => {
|
describe('_checkUnmodifiedSince function :', () => {
|
||||||
it('should return InvalidArgument if header has invalid value', () => {
|
test('should return InvalidArgument if header has invalid value', () => {
|
||||||
const result = _checkUnmodifiedSince('aaaa', lastModified);
|
const result = _checkUnmodifiedSince('aaaa', lastModified);
|
||||||
assert.deepStrictEqual(result.error, errors.InvalidArgument);
|
assert.deepStrictEqual(result.error, errors.InvalidArgument);
|
||||||
});
|
});
|
||||||
|
|
||||||
it('should return PreconditionFailed if header value is earlier than ' +
|
test('should return PreconditionFailed if header value is earlier than ' +
|
||||||
'last modified', () => {
|
'last modified', () => {
|
||||||
const result = _checkUnmodifiedSince(beforeLastModified, lastModified);
|
const result = _checkUnmodifiedSince(beforeLastModified, lastModified);
|
||||||
assert.deepStrictEqual(result.error, errors.PreconditionFailed);
|
assert.deepStrictEqual(result.error, errors.PreconditionFailed);
|
||||||
});
|
});
|
||||||
|
|
||||||
it('should return success if header value is later than last modified',
|
test(
|
||||||
|
'should return success if header value is later than last modified',
|
||||||
() => {
|
() => {
|
||||||
const result = _checkUnmodifiedSince(afterLastModified, lastModified);
|
const result = _checkUnmodifiedSince(afterLastModified, lastModified);
|
||||||
assert.deepStrictEqual(result, expectedSuccess);
|
assert.deepStrictEqual(result, expectedSuccess);
|
||||||
});
|
}
|
||||||
|
);
|
||||||
|
|
||||||
it('should return success if header value is equal to last modified',
|
test('should return success if header value is equal to last modified', () => {
|
||||||
() => {
|
|
||||||
const result = _checkUnmodifiedSince(lastModified, lastModified);
|
const result = _checkUnmodifiedSince(lastModified, lastModified);
|
||||||
assert.deepStrictEqual(result, expectedSuccess);
|
assert.deepStrictEqual(result, expectedSuccess);
|
||||||
});
|
});
|
||||||
|
|
|
@ -24,7 +24,7 @@ const validHosts = [
|
||||||
];
|
];
|
||||||
|
|
||||||
describe('routesUtils.getBucketNameFromHost', () => {
|
describe('routesUtils.getBucketNameFromHost', () => {
|
||||||
it('should extract valid buckets for one endpoint', () => {
|
test('should extract valid buckets for one endpoint', () => {
|
||||||
[
|
[
|
||||||
'b', 'mybucket',
|
'b', 'mybucket',
|
||||||
'buck-et', '-buck-et', 'buck-et-',
|
'buck-et', '-buck-et', 'buck-et-',
|
||||||
|
@ -40,7 +40,7 @@ describe('routesUtils.getBucketNameFromHost', () => {
|
||||||
});
|
});
|
||||||
});
|
});
|
||||||
|
|
||||||
it('should also accept website endpoints', () => {
|
test('should also accept website endpoints', () => {
|
||||||
[
|
[
|
||||||
'in-french.bucket.is-seau.s3-website-eu-west-1.amazonaws.com',
|
'in-french.bucket.is-seau.s3-website-eu-west-1.amazonaws.com',
|
||||||
'in-french.bucket.is-seau.s3-website-us-east-1.amazonaws.com',
|
'in-french.bucket.is-seau.s3-website-us-east-1.amazonaws.com',
|
||||||
|
@ -55,7 +55,7 @@ describe('routesUtils.getBucketNameFromHost', () => {
|
||||||
});
|
});
|
||||||
});
|
});
|
||||||
|
|
||||||
it('should return undefined when non dns-style', () => {
|
test('should return undefined when non dns-style', () => {
|
||||||
[
|
[
|
||||||
's3.amazonaws.com',
|
's3.amazonaws.com',
|
||||||
].forEach(host => {
|
].forEach(host => {
|
||||||
|
@ -66,7 +66,7 @@ describe('routesUtils.getBucketNameFromHost', () => {
|
||||||
});
|
});
|
||||||
});
|
});
|
||||||
|
|
||||||
it('should return undefined when IP addresses', () => {
|
test('should return undefined when IP addresses', () => {
|
||||||
[
|
[
|
||||||
'127.0.0.1',
|
'127.0.0.1',
|
||||||
'8.8.8.8',
|
'8.8.8.8',
|
||||||
|
@ -82,7 +82,7 @@ describe('routesUtils.getBucketNameFromHost', () => {
|
||||||
});
|
});
|
||||||
});
|
});
|
||||||
|
|
||||||
it('should throw when bad request', () => {
|
test('should throw when bad request', () => {
|
||||||
[
|
[
|
||||||
{},
|
{},
|
||||||
{ host: '' },
|
{ host: '' },
|
||||||
|
|
|
@ -5,13 +5,13 @@ const bannedStr = 'banned';
|
||||||
const prefixBlacklist = [];
|
const prefixBlacklist = [];
|
||||||
|
|
||||||
describe('routesUtils.isValidBucketName', () => {
|
describe('routesUtils.isValidBucketName', () => {
|
||||||
it('should return false if bucketname is fewer than ' +
|
test('should return false if bucketname is fewer than ' +
|
||||||
'3 characters long', () => {
|
'3 characters long', () => {
|
||||||
const result = routesUtils.isValidBucketName('no', prefixBlacklist);
|
const result = routesUtils.isValidBucketName('no', prefixBlacklist);
|
||||||
assert.strictEqual(result, false);
|
assert.strictEqual(result, false);
|
||||||
});
|
});
|
||||||
|
|
||||||
it('should return false if bucketname is greater than ' +
|
test('should return false if bucketname is greater than ' +
|
||||||
'63 characters long', () => {
|
'63 characters long', () => {
|
||||||
const longString = 'aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa' +
|
const longString = 'aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa' +
|
||||||
'aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa';
|
'aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa';
|
||||||
|
@ -20,39 +20,39 @@ describe('routesUtils.isValidBucketName', () => {
|
||||||
assert.strictEqual(result, false);
|
assert.strictEqual(result, false);
|
||||||
});
|
});
|
||||||
|
|
||||||
it('should return false if bucketname contains capital letters ' +
|
test('should return false if bucketname contains capital letters ' +
|
||||||
'and is not whitelisted', () => {
|
'and is not whitelisted', () => {
|
||||||
const result =
|
const result =
|
||||||
routesUtils.isValidBucketName('noSHOUTING', prefixBlacklist);
|
routesUtils.isValidBucketName('noSHOUTING', prefixBlacklist);
|
||||||
assert.strictEqual(result, false);
|
assert.strictEqual(result, false);
|
||||||
});
|
});
|
||||||
|
|
||||||
it('should return true if bucketname contains capital letters ' +
|
test('should return true if bucketname contains capital letters ' +
|
||||||
'but is whitelisted', () => {
|
'but is whitelisted', () => {
|
||||||
const result =
|
const result =
|
||||||
routesUtils.isValidBucketName('METADATA', prefixBlacklist);
|
routesUtils.isValidBucketName('METADATA', prefixBlacklist);
|
||||||
assert.strictEqual(result, true);
|
assert.strictEqual(result, true);
|
||||||
});
|
});
|
||||||
|
|
||||||
it('should return false if bucketname starts w/ blacklisted prefix', () => {
|
test('should return false if bucketname starts w/ blacklisted prefix', () => {
|
||||||
const result =
|
const result =
|
||||||
routesUtils.isValidBucketName('bannedbucket', [bannedStr]);
|
routesUtils.isValidBucketName('bannedbucket', [bannedStr]);
|
||||||
assert.strictEqual(result, false);
|
assert.strictEqual(result, false);
|
||||||
});
|
});
|
||||||
|
|
||||||
it('should return false if bucketname is an IP address', () => {
|
test('should return false if bucketname is an IP address', () => {
|
||||||
const result =
|
const result =
|
||||||
routesUtils.isValidBucketName('172.16.254.1', prefixBlacklist);
|
routesUtils.isValidBucketName('172.16.254.1', prefixBlacklist);
|
||||||
assert.strictEqual(result, false);
|
assert.strictEqual(result, false);
|
||||||
});
|
});
|
||||||
|
|
||||||
it('should return false if bucketname is not DNS compatible', () => {
|
test('should return false if bucketname is not DNS compatible', () => {
|
||||||
const result =
|
const result =
|
||||||
routesUtils.isValidBucketName('*notvalid*', prefixBlacklist);
|
routesUtils.isValidBucketName('*notvalid*', prefixBlacklist);
|
||||||
assert.strictEqual(result, false);
|
assert.strictEqual(result, false);
|
||||||
});
|
});
|
||||||
|
|
||||||
it('should return true if bucketname does not break rules', () => {
|
test('should return true if bucketname does not break rules', () => {
|
||||||
const result = routesUtils.isValidBucketName('okay', prefixBlacklist);
|
const result = routesUtils.isValidBucketName('okay', prefixBlacklist);
|
||||||
assert.strictEqual(result, true);
|
assert.strictEqual(result, true);
|
||||||
});
|
});
|
||||||
|
|
|
@ -13,7 +13,7 @@ const validHosts = [
|
||||||
];
|
];
|
||||||
|
|
||||||
describe('routesUtils.normalizeRequest', () => {
|
describe('routesUtils.normalizeRequest', () => {
|
||||||
it('should parse bucket name from path', () => {
|
test('should parse bucket name from path', () => {
|
||||||
const request = {
|
const request = {
|
||||||
url: `/${bucketName}`,
|
url: `/${bucketName}`,
|
||||||
headers: { host: 's3.amazonaws.com' },
|
headers: { host: 's3.amazonaws.com' },
|
||||||
|
@ -23,7 +23,7 @@ describe('routesUtils.normalizeRequest', () => {
|
||||||
assert.strictEqual(result.parsedHost, 's3.amazonaws.com');
|
assert.strictEqual(result.parsedHost, 's3.amazonaws.com');
|
||||||
});
|
});
|
||||||
|
|
||||||
it('should parse bucket name from path when no slash', () => {
|
test('should parse bucket name from path when no slash', () => {
|
||||||
const request = {
|
const request = {
|
||||||
url: `${bucketName}`,
|
url: `${bucketName}`,
|
||||||
headers: { host: 's3.amazonaws.com' },
|
headers: { host: 's3.amazonaws.com' },
|
||||||
|
@ -33,7 +33,7 @@ describe('routesUtils.normalizeRequest', () => {
|
||||||
assert.strictEqual(result.parsedHost, 's3.amazonaws.com');
|
assert.strictEqual(result.parsedHost, 's3.amazonaws.com');
|
||||||
});
|
});
|
||||||
|
|
||||||
it('should parse bucket name from host', () => {
|
test('should parse bucket name from host', () => {
|
||||||
const request = {
|
const request = {
|
||||||
url: '/',
|
url: '/',
|
||||||
headers: { host: `${bucketName}.s3.amazonaws.com` },
|
headers: { host: `${bucketName}.s3.amazonaws.com` },
|
||||||
|
@ -43,7 +43,7 @@ describe('routesUtils.normalizeRequest', () => {
|
||||||
assert.strictEqual(result.parsedHost, 's3.amazonaws.com');
|
assert.strictEqual(result.parsedHost, 's3.amazonaws.com');
|
||||||
});
|
});
|
||||||
|
|
||||||
it('should parse bucket and object name from path', () => {
|
test('should parse bucket and object name from path', () => {
|
||||||
const request = {
|
const request = {
|
||||||
url: `/${bucketName}/${objName}`,
|
url: `/${bucketName}/${objName}`,
|
||||||
headers: { host: 's3.amazonaws.com' },
|
headers: { host: 's3.amazonaws.com' },
|
||||||
|
@ -54,7 +54,7 @@ describe('routesUtils.normalizeRequest', () => {
|
||||||
assert.strictEqual(result.parsedHost, 's3.amazonaws.com');
|
assert.strictEqual(result.parsedHost, 's3.amazonaws.com');
|
||||||
});
|
});
|
||||||
|
|
||||||
it('should parse bucket and object name from path with IP address', () => {
|
test('should parse bucket and object name from path with IP address', () => {
|
||||||
const request = {
|
const request = {
|
||||||
url: `/${bucketName}/${objName}`,
|
url: `/${bucketName}/${objName}`,
|
||||||
headers: { host: '[::1]' },
|
headers: { host: '[::1]' },
|
||||||
|
@ -65,7 +65,7 @@ describe('routesUtils.normalizeRequest', () => {
|
||||||
assert.strictEqual(result.parsedHost, '[::1]');
|
assert.strictEqual(result.parsedHost, '[::1]');
|
||||||
});
|
});
|
||||||
|
|
||||||
it('should parse bucket name from host ' +
|
test('should parse bucket name from host ' +
|
||||||
'and object name from path', () => {
|
'and object name from path', () => {
|
||||||
const request = {
|
const request = {
|
||||||
url: `/${objName}`,
|
url: `/${objName}`,
|
||||||
|
|
|
@ -5,7 +5,7 @@ const assert = require('assert');
|
||||||
const shuffle = require('../../index').shuffle;
|
const shuffle = require('../../index').shuffle;
|
||||||
|
|
||||||
describe('Shuffle', () => {
|
describe('Shuffle', () => {
|
||||||
it('should fail less than 0.005% times', done => {
|
test('should fail less than 0.005% times', done => {
|
||||||
let array = [];
|
let array = [];
|
||||||
const reference = [1, 2, 3, 4, 5, 6, 7, 8, 9, 0];
|
const reference = [1, 2, 3, 4, 5, 6, 7, 8, 9, 0];
|
||||||
let fails = 0;
|
let fails = 0;
|
||||||
|
|
|
@ -117,16 +117,16 @@ describe('raft record log client', () => {
|
||||||
done();
|
done();
|
||||||
}
|
}
|
||||||
|
|
||||||
before(done => {
|
beforeAll(done => {
|
||||||
setup(done);
|
setup(done);
|
||||||
});
|
});
|
||||||
|
|
||||||
after(done => {
|
afterAll(done => {
|
||||||
done();
|
done();
|
||||||
});
|
});
|
||||||
|
|
||||||
describe('readRecords', () => {
|
describe('readRecords', () => {
|
||||||
it('should list all records in a log', done => {
|
test('should list all records in a log', done => {
|
||||||
let nbRecords = 0;
|
let nbRecords = 0;
|
||||||
logClient.readRecords({}, (err, info) => {
|
logClient.readRecords({}, (err, info) => {
|
||||||
const recordStream = info.log;
|
const recordStream = info.log;
|
||||||
|
@ -160,7 +160,7 @@ describe('raft record log client', () => {
|
||||||
});
|
});
|
||||||
});
|
});
|
||||||
describe('error cases', () => {
|
describe('error cases', () => {
|
||||||
it('should handle 404 error gracefully', done => {
|
test('should handle 404 error gracefully', done => {
|
||||||
const logClient = new LogConsumer({ bucketClient,
|
const logClient = new LogConsumer({ bucketClient,
|
||||||
raftSession: 1 });
|
raftSession: 1 });
|
||||||
logClient.readRecords({}, (err, info) => {
|
logClient.readRecords({}, (err, info) => {
|
||||||
|
@ -170,7 +170,7 @@ describe('raft record log client', () => {
|
||||||
done();
|
done();
|
||||||
});
|
});
|
||||||
});
|
});
|
||||||
it('should handle 416 error gracefully', done => {
|
test('should handle 416 error gracefully', done => {
|
||||||
const logClient = new LogConsumer({ bucketClient,
|
const logClient = new LogConsumer({ bucketClient,
|
||||||
raftSession: 2 });
|
raftSession: 2 });
|
||||||
logClient.readRecords({}, (err, info) => {
|
logClient.readRecords({}, (err, info) => {
|
||||||
|
@ -180,7 +180,7 @@ describe('raft record log client', () => {
|
||||||
done();
|
done();
|
||||||
});
|
});
|
||||||
});
|
});
|
||||||
it('should handle other errors correctly', done => {
|
test('should handle other errors correctly', done => {
|
||||||
const logClient = new LogConsumer({ bucketClient,
|
const logClient = new LogConsumer({ bucketClient,
|
||||||
raftSession: 3 });
|
raftSession: 3 });
|
||||||
logClient.readRecords({}, err => {
|
logClient.readRecords({}, err => {
|
||||||
|
@ -189,7 +189,7 @@ describe('raft record log client', () => {
|
||||||
done();
|
done();
|
||||||
});
|
});
|
||||||
});
|
});
|
||||||
it('should return error with malformed log response', done => {
|
test('should return error with malformed log response', done => {
|
||||||
const logClient = new LogConsumer({ bucketClient,
|
const logClient = new LogConsumer({ bucketClient,
|
||||||
raftSession: 4 });
|
raftSession: 4 });
|
||||||
logClient.readRecords({}, err => {
|
logClient.readRecords({}, err => {
|
||||||
|
@ -198,7 +198,7 @@ describe('raft record log client', () => {
|
||||||
done();
|
done();
|
||||||
});
|
});
|
||||||
});
|
});
|
||||||
it('should emit error event if a log entry is malformed', done => {
|
test('should emit error event if a log entry is malformed', done => {
|
||||||
const logClient = new LogConsumer({ bucketClient,
|
const logClient = new LogConsumer({ bucketClient,
|
||||||
raftSession: 5 });
|
raftSession: 5 });
|
||||||
logClient.readRecords({}, (err, res) => {
|
logClient.readRecords({}, (err, res) => {
|
||||||
|
|
|
@@ -66,7 +66,7 @@ describe('record log - persistent log of metadata operations', () => {
         done();
     }
 
-    before(done => {
+    beforeAll(done => {
         temp.mkdir('record-log-testdir-', (err, dbDir) => {
             const rootDb = level(dbDir);
             db = sublevel(rootDb);
@@ -74,7 +74,7 @@ describe('record log - persistent log of metadata operations', () => {
         });
     });
 
-    after(done => {
+    afterAll(done => {
         server.close();
         done();
     });
@@ -97,7 +97,7 @@ describe('record log - persistent log of metadata operations', () => {
         }
     });
 
-    it('should list an empty record log', done => {
+    test('should list an empty record log', done => {
         logProxy.readRecords({}, (err, res) => {
             assert.ifError(err);
             const info = res.info;
@@ -113,7 +113,7 @@ describe('record log - persistent log of metadata operations', () => {
             recordStream.on('end', done);
         });
     });
-    it('should be able to add records and list them thereafter', done => {
+    test('should be able to add records and list them thereafter', done => {
         debug('going to append records');
         const ops = [{ type: 'put', key: 'foo', value: 'bar',
                        prefix: ['foobucket'] },
@@ -189,7 +189,7 @@ describe('record log - persistent log of metadata operations', () => {
    describe('readRecords', () => {
        let logProxy;
 
-        before(done => {
+        beforeAll(done => {
            logProxy = createScratchRecordLog(cliLogger, err => {
                assert.ifError(err);
                // fill the log with 1000 entries
@@ -238,28 +238,28 @@ describe('record log - persistent log of metadata operations', () => {
                done();
            });
        }
-        it('should list all entries', done => {
+        test('should list all entries', done => {
            logProxy.readRecords({}, (err, res) => {
                assert.ifError(err);
                checkReadRecords(res, { startSeq: 1, endSeq: 1000 }, done);
            });
        });
 
-        it('should list all entries from a given startSeq', done => {
+        test('should list all entries from a given startSeq', done => {
            logProxy.readRecords({ startSeq: 500 }, (err, res) => {
                assert.ifError(err);
                checkReadRecords(res, { startSeq: 500, endSeq: 1000 }, done);
            });
        });
 
-        it('should list all entries up to a given endSeq', done => {
+        test('should list all entries up to a given endSeq', done => {
            logProxy.readRecords({ endSeq: 500 }, (err, res) => {
                assert.ifError(err);
                checkReadRecords(res, { startSeq: 1, endSeq: 500 }, done);
            });
        });
 
-        it('should list all entries in a seq range', done => {
+        test('should list all entries in a seq range', done => {
            logProxy.readRecords(
                { startSeq: 100, endSeq: 500 }, (err, res) => {
                    assert.ifError(err);
@@ -268,8 +268,7 @@ describe('record log - persistent log of metadata operations', () => {
                });
        });
 
-        it('should list all entries from a given startSeq up to a limit',
-           done => {
+        test('should list all entries from a given startSeq up to a limit', done => {
            logProxy.readRecords(
                { startSeq: 100, limit: 100 }, (err, res) => {
                    assert.ifError(err);
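The hook renames above follow the usual Mocha-to-Jest mapping: `before`/`after` become `beforeAll`/`afterAll`, while `beforeEach`/`afterEach` keep their names. A minimal sketch, assuming only Jest globals and nothing from this repository; `resource` is a hypothetical placeholder for the kind of state (temp leveldb dir, server) handled in the hooks above.

```js
// Illustrative only: one-time setup and teardown in Jest.
describe('lifecycle hooks (illustration)', () => {
    let resource;

    beforeAll(done => {
        resource = { open: true }; // runs once, before the first test in this block
        done();
    });

    afterAll(done => {
        resource.open = false;     // runs once, after the last test in this block
        done();
    });

    test('sees the resource opened by beforeAll', () => {
        expect(resource.open).toBe(true);
    });
});
```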
@@ -25,15 +25,15 @@ function check(array) {
 }
 
 describe('StringHash', () => {
-    it('Should compute a string hash', done => {
+    test('Should compute a string hash', done => {
         const hash1 = stringHash('Hello!');
         const hash2 = stringHash('Hello?');
         assert.notDeepStrictEqual(hash1, hash2);
         done();
     });
-    it(`Should distribute uniformly with a maximum of ${ERROR}% of deviation`,
-        function f(done) {
-            this.timeout(20000);
+    test(
+        `Should distribute uniformly with a maximum of ${ERROR}% of deviation`,
+        done => {
             const strings = new Array(STRING_COUNT).fill('')
                 .map(() => randomString(10));
             const arr = new Array(ARRAY_LENGTH).fill(0);
@@ -42,5 +42,6 @@ describe('StringHash', () => {
                 ++arr[ind];
             });
             done(check(arr));
-        });
+        }
+    );
 });
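One behavioural difference worth flagging in the StringHash change: the Mocha version set a per-test budget with `this.timeout(20000)`, which has no equivalent inside an arrow-function Jest test, so the converted test falls back to whatever global timeout is configured. If the 20-second budget should stay scoped to this one test, Jest accepts an optional timeout in milliseconds as the third argument to `test(name, fn, timeout)`. A hedged sketch, with `slowDistributionCheck` as a hypothetical stand-in for the uniformity check:

```js
// Sketch only: keep a 20s per-test budget via the third argument of test().
const slowDistributionCheck = () =>
    new Promise(resolve => setTimeout(resolve, 10));

test('distribution check keeps its own 20s budget', async () => {
    await slowDistributionCheck();
}, 20000);
```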
@@ -17,7 +17,7 @@ describe('test generating versionIds', () => {
     }
     process.env.VID_CRYPTO_PASSWORD = randkey(64);
 
-    it('sorted in reversed chronological and alphabetical order', () => {
+    test('sorted in reversed chronological and alphabetical order', () => {
         for (let i = 0; i < count; i++) {
             if (i !== 0) {
                 assert(vids[i - 1] > vids[i],
@@ -26,13 +26,16 @@ describe('test generating versionIds', () => {
         }
     });
 
-    it('should return error decoding non-hex string versionIds', () => {
+    test('should return error decoding non-hex string versionIds', () => {
         const encoded = vids.map(vid => VID.encode(vid));
         const decoded = encoded.map(vid => VID.decode(`${vid}foo`));
-        decoded.forEach(result => assert(result instanceof Error));
+        decoded.forEach(result => {
+            console.log(result)
+            assert(result instanceof Error);
+        });
     });
 
-    it('should encode and decode versionIds', () => {
+    test('should encode and decode versionIds', () => {
         const encoded = vids.map(vid => VID.encode(vid));
         const decoded = encoded.map(vid => VID.decode(vid));
         assert.strictEqual(vids.length, count);
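The expanded `forEach` in the versionIds test adds a `console.log(result)` (note the missing semicolon), which reads like leftover debugging output. If the intent is to see the offending value when the assertion fails, Jest's matchers already print the received value. A hedged alternative sketch; `decode` here is a hypothetical placeholder, not the real `VID.decode`:

```js
// Illustrative only: rely on matcher output instead of console.log.
const decode = value =>
    (/^[0-9a-f]+$/.test(value) ? value : new Error('cannot decode non-hex input'));

test('flags non-hex version ids as Errors', () => {
    ['abc123foo', 'zzz'].forEach(value => {
        // On failure, Jest prints the received value alongside the matcher.
        expect(decode(value)).toBeInstanceOf(Error);
    });
});
```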
@@ -93,12 +93,12 @@ function batch(callback) {
 describe('test VSP', () => {
     afterEach(() => _cleanupKeyValueStore());
 
-    it('should run a batch of operations correctly', done => {
+    test('should run a batch of operations correctly', done => {
         async.times(THREADS,
             (i, next) => setTimeout(() => batch(next), i), done);
     });
 
-    it('should be able to repair a PHD master version', done => {
+    test('should be able to repair a PHD master version', done => {
         const putRequest = {
             db: 'foo',
             key: 'bar',
@@ -150,7 +150,7 @@ describe('test VSP', () => {
             }),
         ], done);
     });
-    it('should allow to write a specific version + update master', done => {
+    test('should allow to write a specific version + update master', done => {
         let v1;
         let v2;
 
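For the VSP tests, the `done`-callback style used with `async.times` behaves the same under Jest as it did under Mocha: the test finishes when the final callback fires, and a truthy error argument fails it. A minimal runnable sketch, assuming only the `async` package already used above; `fakeBatch` is a hypothetical stand-in for the real `batch` helper:

```js
// Illustrative only: fan out staggered tasks and finish the test when
// async.times reports back through `done`.
const async = require('async');

const THREADS = 5;
const fakeBatch = cb => setImmediate(cb); // hypothetical no-op batch

test('runs staggered batches and completes via done', done => {
    async.times(THREADS,
        (i, next) => setTimeout(() => fakeBatch(next), i), done);
});
```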