Compare commits

...

4 Commits

Author SHA1 Message Date
Rahul Padigela 8a57523f55 update gitignore 2019-05-08 15:38:46 -07:00
Rahul Padigela 8c8d9e2051 remove references to 'test' 2019-05-08 15:37:58 -07:00
Rahul Padigela 7416c12702 use jest, jest config 2019-05-08 15:37:23 -07:00
Rahul Padigela a8563379aa improvement: migrate to jest 2019-05-07 16:02:18 -07:00
65 changed files with 9458 additions and 1757 deletions

2
.gitignore vendored
View File

@ -10,3 +10,5 @@ node_modules/
*-linux
*-macos
# Coverage
coverage/
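
The new coverage/ entry is ignored because the updated test script (further down in package.json) runs Jest with --coverage, and Jest writes its coverage report to a coverage/ directory at the project root by default.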

7
jest.config.js Normal file
View File

@ -0,0 +1,7 @@
module.exports = {
testEnvironment: 'node',
moduleFileExtensions: ['js', 'jsx', 'json', 'node'],
setupFiles: [],
setupFilesAfterEnv: ['<rootDir>/jest.postsetup.js'],
testMatch: ['**/tests/**/*.js?(x)']
};

1
jest.postsetup.js Normal file
View File

@ -0,0 +1 @@
jest.setTimeout(120000);
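
Together these two files replace mocha's runner flags: jest.config.js selects the Node test environment and collects every .js/.jsx file under a tests/ directory, while jest.postsetup.js is loaded through setupFilesAfterEnv, i.e. after the Jest globals are installed, so it can raise the per-test timeout to 120 seconds, which mocha previously received via its --timeout flags. A minimal sketch of a unit test this configuration would pick up; the file path and assertions are hypothetical and not part of this changeset:

// tests/unit/example.js (hypothetical), matched by testMatch '**/tests/**/*.js?(x)'
const assert = require('assert');

describe('example suite', () => {
    // The 120000 ms limit from jest.postsetup.js applies here via jest.setTimeout.
    test('completes within the global timeout', done => {
        assert.strictEqual(1 + 1, 2);
        done();
    });
});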

7743
package-lock.json generated Normal file

File diff suppressed because it is too large

View File

@ -41,9 +41,10 @@
},
"devDependencies": {
"eslint": "2.13.1",
"eslint-plugin-react": "^4.3.0",
"eslint-config-airbnb": "6.2.0",
"eslint-config-scality": "scality/Guidelines#71a059ad",
"eslint-plugin-react": "^4.3.0",
"jest": "^24.8.0",
"lolex": "1.5.2",
"mocha": "2.5.3",
"temp": "0.8.3"
@ -52,8 +53,8 @@
"lint": "eslint $(git ls-files '*.js')",
"lint_md": "mdlint $(git ls-files '*.md')",
"lint_yml": "yamllint $(git ls-files '*.yml')",
"test": "mocha --recursive --timeout 5500 tests/unit",
"ft_test": "find tests/functional -name \"*.js\" | grep -v \"utils/\" | xargs mocha --timeout 120000"
"test": "jest --projects jest.config.js --coverage --testPathPattern='tests/unit/[\\w/-]+\\.[tj]s' tests/unit",
"ft_test": "jest tests/functional"
},
"private": true
}
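
In the scripts above, the unit test entry now invokes Jest with --projects jest.config.js to load the new configuration, --coverage to produce the report ignored by the .gitignore change, and a --testPathPattern regex that limits collection to files under tests/unit; ft_test drops the find/xargs/mocha pipeline in favor of a plain jest tests/functional, with its former 120000 ms timeout now supplied by jest.postsetup.js. Callers keep running npm test and npm run ft_test as before.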

View File

@ -35,19 +35,19 @@ describe('Clustering', () => {
return done();
});
it('Should create and stop workers properly', done => {
test('Should create and stop workers properly', done => {
runTest('simple', done);
});
it('Should restart workers until clustering stopped', done => {
test('Should restart workers until clustering stopped', done => {
runTest('watchdog', done);
});
it('Should shutdown cluster if master killed', done => {
test('Should shutdown cluster if master killed', done => {
runTest('killed', done);
});
it('Should timeout shutdown of workers if not exiting properly', done => {
test('Should timeout shutdown of workers if not exiting properly', done => {
runTest('shutdownTimeout', done);
});
});
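
The per-file changes that follow are largely this mechanical rename from mocha's it() to Jest's test(). Jest exposes it as an alias of test, and callback-style tests that take done keep working, so the rename is a stylistic normalization rather than a behavioral change. A hypothetical illustration, not part of this changeset:

// Both globals are available under Jest and behave identically.
it('passes via the it alias', done => done());
test('passes via the test global', done => done());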

View File

@ -122,14 +122,14 @@ describe('Multipart Uploads listing algorithm', () => {
done();
});
it('should perform a listing of all keys', done => {
test('should perform a listing of all keys', done => {
const listingResult = performListing(keys, MultipartUploads,
listingParams, logger);
assert.deepStrictEqual(listingResult, expectedResult);
done();
});
it('should perform a listing with delimiter', done => {
test('should perform a listing with delimiter', done => {
const delimiter = '/';
listingParams.delimiter = delimiter;
// format result
@ -146,7 +146,7 @@ describe('Multipart Uploads listing algorithm', () => {
done();
});
it('should perform a listing with max keys', done => {
test('should perform a listing with max keys', done => {
listingParams.maxKeys = 3;
// format result
expectedResult.Uploads.pop();

View File

@ -33,15 +33,15 @@ describe('Basic listing algorithm', () => {
new Test('without parameters', undefined, data.slice(0, 10000)),
new Test('with bad parameters', 'lala', data.slice(0, 10000)),
];
tests.forEach(test => {
it(`Should list ${test.name}`, done => {
const res = performListing(data, Basic, test.input, logger);
assert.deepStrictEqual(res, test.output);
tests.forEach(t => {
test(`Should list ${t.name}`, done => {
const res = performListing(data, Basic, t.input, logger);
assert.deepStrictEqual(res, t.output);
done();
});
});
it('Should support entries with no key', () => {
test('Should support entries with no key', () => {
const res1 = performListing([{
value: '{"data":"foo"}',
}], Basic, { maxKeys: 1 }, logger);
@ -60,7 +60,7 @@ describe('Basic listing algorithm', () => {
}]);
});
it('Should support key-only listing', () => {
test('Should support key-only listing', () => {
const res = performListing(['key1', 'key2'],
Basic, { maxKeys: 1 }, logger);
assert.deepStrictEqual(res, ['key1']);
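
The other recurring change, renaming the forEach callback parameter from test to t, avoids shadowing Jest's global test function: inside a callback whose parameter is named test, a call to test('...', ...) would invoke the loop element (a Test fixture object) instead of the framework and throw. A minimal sketch of the problem the rename sidesteps, not part of this changeset:

const tests = [{ name: 'example fixture' }];   // stand-in for the fixtures above
tests.forEach(test => {
    // Here `test` names the fixture and shadows Jest's global, so the call below
    // would be a TypeError; renaming the parameter to `t` keeps the global reachable.
    // test(`Should list ${test.name}`, () => {});
});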

View File

@ -474,7 +474,7 @@ const alphabeticalOrderTests = [
describe('Delimiter listing algorithm', () => {
it('Should return good skipping value for DelimiterMaster', done => {
test('Should return good skipping value for DelimiterMaster', done => {
const delimiter = new DelimiterMaster({ delimiter: '/' });
for (let i = 0; i < 100; i++) {
delimiter.filter({ key: `foo/${zpad(i)}`, value: '{}' });
@ -483,69 +483,70 @@ describe('Delimiter listing algorithm', () => {
done();
});
it('Should set Delimiter alphabeticalOrder field to the expected value',
() => {
alphabeticalOrderTests.forEach(test => {
const delimiter = new Delimiter(test.params);
assert.strictEqual(delimiter.alphabeticalOrder,
test.expectedValue,
`${JSON.stringify(test.params)}`);
});
});
test(
'Should set Delimiter alphabeticalOrder field to the expected value',
() => {
alphabeticalOrderTests.forEach(test => {
const delimiter = new Delimiter(test.params);
assert.strictEqual(delimiter.alphabeticalOrder,
test.expectedValue,
`${JSON.stringify(test.params)}`);
});
}
);
tests.forEach(test => {
it(`Should list ${test.name}`, done => {
tests.forEach(t => {
test(`Should list ${t.name}`, done => {
// Simulate skip scan done by LevelDB
const d = data.filter(e => test.filter(e, test.input));
const res = performListing(d, Delimiter, test.input, logger);
assert.deepStrictEqual(res, test.output);
const d = data.filter(e => t.filter(e, t.input));
const res = performListing(d, Delimiter, t.input, logger);
assert.deepStrictEqual(res, t.output);
done();
});
});
tests.forEach(test => {
it(`Should list master versions ${test.name}`, done => {
tests.forEach(t => {
test(`Should list master versions ${t.name}`, done => {
// Simulate skip scan done by LevelDB
const d = dataVersioned.filter(e => test.filter(e, test.input));
const res = performListing(d, DelimiterMaster, test.input, logger);
assert.deepStrictEqual(res, test.output);
const d = dataVersioned.filter(e => t.filter(e, t.input));
const res = performListing(d, DelimiterMaster, t.input, logger);
assert.deepStrictEqual(res, t.output);
done();
});
});
it('Should filter values according to alphabeticalOrder parameter',
() => {
let test = new Test('alphabeticalOrder parameter set', {
delimiter: '/',
alphabeticalOrder: true,
}, {
Contents: [
receivedNonAlphaData[0],
],
Delimiter: '/',
CommonPrefixes: [],
IsTruncated: false,
NextMarker: undefined,
});
let d = nonAlphabeticalData.filter(e => test.filter(e, test.input));
let res = performListing(d, Delimiter, test.input, logger);
assert.deepStrictEqual(res, test.output);
test('Should filter values according to alphabeticalOrder parameter', () => {
let t = new Test('alphabeticalOrder parameter set', {
delimiter: '/',
alphabeticalOrder: true,
}, {
Contents: [
receivedNonAlphaData[0],
],
Delimiter: '/',
CommonPrefixes: [],
IsTruncated: false,
NextMarker: undefined,
});
let d = nonAlphabeticalData.filter(e => t.filter(e, t.input));
let res = performListing(d, Delimiter, t.input, logger);
assert.deepStrictEqual(res, t.output);
test = new Test('alphabeticalOrder parameter set', {
delimiter: '/',
alphabeticalOrder: false,
}, {
Contents: [
receivedNonAlphaData[0],
receivedNonAlphaData[1],
],
Delimiter: '/',
CommonPrefixes: [],
IsTruncated: false,
NextMarker: undefined,
});
d = nonAlphabeticalData.filter(e => test.filter(e, test.input));
res = performListing(d, Delimiter, test.input, logger);
assert.deepStrictEqual(res, test.output);
});
t = new Test('alphabeticalOrder parameter set', {
delimiter: '/',
alphabeticalOrder: false,
}, {
Contents: [
receivedNonAlphaData[0],
receivedNonAlphaData[1],
],
Delimiter: '/',
CommonPrefixes: [],
IsTruncated: false,
NextMarker: undefined,
});
d = nonAlphabeticalData.filter(e => t.filter(e, t.input));
res = performListing(d, Delimiter, t.input, logger);
assert.deepStrictEqual(res, t.output);
});
});

View File

@ -34,7 +34,7 @@ const fakeLogger = {
};
describe('Delimiter All masters listing algorithm', () => {
it('should return SKIP_NONE for DelimiterMaster when both NextMarker ' +
test('should return SKIP_NONE for DelimiterMaster when both NextMarker ' +
'and NextContinuationToken are undefined', () => {
const delimiter = new DelimiterMaster({ delimiter: '/' }, fakeLogger);
@ -45,7 +45,7 @@ describe('Delimiter All masters listing algorithm', () => {
assert.strictEqual(delimiter.skipping(), SKIP_NONE);
});
it('should return <key><VersionIdSeparator> for DelimiterMaster when ' +
test('should return <key><VersionIdSeparator> for DelimiterMaster when ' +
'NextMarker is set and there is a delimiter', () => {
const key = 'key';
const delimiter = new DelimiterMaster({ delimiter: '/', marker: key },
@ -62,7 +62,7 @@ describe('Delimiter All masters listing algorithm', () => {
assert.strictEqual(delimiter.skipping(), key + VID_SEP);
});
it('should return <key><VersionIdSeparator> for DelimiterMaster when ' +
test('should return <key><VersionIdSeparator> for DelimiterMaster when ' +
'NextContinuationToken is set and there is a delimiter', () => {
const key = 'key';
const delimiter = new DelimiterMaster(
@ -76,7 +76,7 @@ describe('Delimiter All masters listing algorithm', () => {
assert.strictEqual(delimiter.skipping(), key + VID_SEP);
});
it('should return NextMarker for DelimiterMaster when NextMarker is set' +
test('should return NextMarker for DelimiterMaster when NextMarker is set' +
', there is a delimiter and the key ends with the delimiter', () => {
const delimiterChar = '/';
const keyWithEndingDelimiter = `key${delimiterChar}`;
@ -91,7 +91,7 @@ describe('Delimiter All masters listing algorithm', () => {
assert.strictEqual(delimiter.skipping(), keyWithEndingDelimiter);
});
it('should skip entries not starting with prefix', () => {
test('should skip entries not starting with prefix', () => {
const delimiter = new DelimiterMaster({ prefix: 'prefix' }, fakeLogger);
assert.strictEqual(delimiter.filter({ key: 'wrong' }), FILTER_SKIP);
@ -100,7 +100,7 @@ describe('Delimiter All masters listing algorithm', () => {
assert.deepStrictEqual(delimiter.result(), EmptyResult);
});
it('should skip entries superior to next marker', () => {
test('should skip entries superior to next marker', () => {
const delimiter = new DelimiterMaster({ marker: 'b' }, fakeLogger);
assert.strictEqual(delimiter.filter({ key: 'a' }), FILTER_SKIP);
@ -109,7 +109,7 @@ describe('Delimiter All masters listing algorithm', () => {
assert.deepStrictEqual(delimiter.result(), EmptyResult);
});
it('should accept a master version', () => {
test('should accept a master version', () => {
const delimiter = new DelimiterMaster({}, fakeLogger);
const key = 'key';
const value = '';
@ -126,7 +126,7 @@ describe('Delimiter All masters listing algorithm', () => {
});
});
it('should accept a PHD version as first input', () => {
test('should accept a PHD version as first input', () => {
const delimiter = new DelimiterMaster({}, fakeLogger);
const keyPHD = 'keyPHD';
const objPHD = {
@ -143,7 +143,7 @@ describe('Delimiter All masters listing algorithm', () => {
assert.deepStrictEqual(delimiter.result(), EmptyResult);
});
it('should accept a PHD version', () => {
test('should accept a PHD version', () => {
const delimiter = new DelimiterMaster({}, fakeLogger);
const key = 'keyA';
const value = '';
@ -172,7 +172,7 @@ describe('Delimiter All masters listing algorithm', () => {
});
});
it('should accept a version after a PHD', () => {
test('should accept a version after a PHD', () => {
const delimiter = new DelimiterMaster({}, fakeLogger);
const masterKey = 'key';
const keyVersion = `${masterKey}${VID_SEP}version`;
@ -202,7 +202,7 @@ describe('Delimiter All masters listing algorithm', () => {
});
});
it('should accept a delete marker', () => {
test('should accept a delete marker', () => {
const delimiter = new DelimiterMaster({}, fakeLogger);
const version = new Version({ isDeleteMarker: true });
const key = 'key';
@ -219,7 +219,7 @@ describe('Delimiter All masters listing algorithm', () => {
assert.deepStrictEqual(delimiter.result(), EmptyResult);
});
it('should skip version after a delete marker', () => {
test('should skip version after a delete marker', () => {
const delimiter = new DelimiterMaster({}, fakeLogger);
const version = new Version({ isDeleteMarker: true });
const key = 'key';
@ -235,7 +235,7 @@ describe('Delimiter All masters listing algorithm', () => {
assert.deepStrictEqual(delimiter.result(), EmptyResult);
});
it('should accept a new key after a delete marker', () => {
test('should accept a new key after a delete marker', () => {
const delimiter = new DelimiterMaster({}, fakeLogger);
const version = new Version({ isDeleteMarker: true });
const key1 = 'key1';
@ -258,7 +258,7 @@ describe('Delimiter All masters listing algorithm', () => {
});
});
it('should accept the master version and skip the other ones', () => {
test('should accept the master version and skip the other ones', () => {
const delimiter = new DelimiterMaster({}, fakeLogger);
const masterKey = 'key';
const masterValue = 'value';
@ -286,7 +286,7 @@ describe('Delimiter All masters listing algorithm', () => {
});
});
it('should return good listing result for version', () => {
test('should return good listing result for version', () => {
const delimiter = new DelimiterMaster({}, fakeLogger);
const masterKey = 'key';
const versionKey1 = `${masterKey}${VID_SEP}version1`;
@ -320,61 +320,63 @@ describe('Delimiter All masters listing algorithm', () => {
});
});
it('should return good values for entries with different common prefixes',
() => {
const delimiterChar = '/';
const commonPrefix1 = `commonPrefix1${delimiterChar}`;
const commonPrefix2 = `commonPrefix2${delimiterChar}`;
const prefix1Key1 = `${commonPrefix1}key1`;
const prefix1Key2 = `${commonPrefix1}key2`;
const prefix2Key1 = `${commonPrefix2}key1`;
const value = 'value';
test(
'should return good values for entries with different common prefixes',
() => {
const delimiterChar = '/';
const commonPrefix1 = `commonPrefix1${delimiterChar}`;
const commonPrefix2 = `commonPrefix2${delimiterChar}`;
const prefix1Key1 = `${commonPrefix1}key1`;
const prefix1Key2 = `${commonPrefix1}key2`;
const prefix2Key1 = `${commonPrefix2}key1`;
const value = 'value';
const delimiter = new DelimiterMaster({ delimiter: delimiterChar },
fakeLogger);
const delimiter = new DelimiterMaster({ delimiter: delimiterChar },
fakeLogger);
/* Filter the first entry with a common prefix. It should be
* accepted and added to the result. */
assert.strictEqual(delimiter.filter({ key: prefix1Key1, value }),
FILTER_ACCEPT);
assert.deepStrictEqual(delimiter.result(), {
CommonPrefixes: [commonPrefix1],
Contents: [],
IsTruncated: false,
NextMarker: undefined,
Delimiter: delimiterChar,
});
/* Filter the first entry with a common prefix. It should be
* accepted and added to the result. */
assert.strictEqual(delimiter.filter({ key: prefix1Key1, value }),
FILTER_ACCEPT);
assert.deepStrictEqual(delimiter.result(), {
CommonPrefixes: [commonPrefix1],
Contents: [],
IsTruncated: false,
NextMarker: undefined,
Delimiter: delimiterChar,
});
/* Filter the second entry with the same common prefix than the
* first entry. It should be skipped and not added to the result. */
assert.strictEqual(delimiter.filter({ key: prefix1Key2, value }),
FILTER_SKIP);
assert.deepStrictEqual(delimiter.result(), {
CommonPrefixes: [commonPrefix1],
Contents: [],
IsTruncated: false,
NextMarker: undefined,
Delimiter: delimiterChar,
});
/* Filter the second entry with the same common prefix than the
* first entry. It should be skipped and not added to the result. */
assert.strictEqual(delimiter.filter({ key: prefix1Key2, value }),
FILTER_SKIP);
assert.deepStrictEqual(delimiter.result(), {
CommonPrefixes: [commonPrefix1],
Contents: [],
IsTruncated: false,
NextMarker: undefined,
Delimiter: delimiterChar,
});
/* Filter an entry with a new common prefix. It should be accepted
* and not added to the result. */
assert.strictEqual(delimiter.filter({ key: prefix2Key1, value }),
FILTER_ACCEPT);
assert.deepStrictEqual(delimiter.result(), {
CommonPrefixes: [commonPrefix1, commonPrefix2],
Contents: [],
IsTruncated: false,
NextMarker: undefined,
Delimiter: delimiterChar,
});
});
/* Filter an entry with a new common prefix. It should be accepted
* and not added to the result. */
assert.strictEqual(delimiter.filter({ key: prefix2Key1, value }),
FILTER_ACCEPT);
assert.deepStrictEqual(delimiter.result(), {
CommonPrefixes: [commonPrefix1, commonPrefix2],
Contents: [],
IsTruncated: false,
NextMarker: undefined,
Delimiter: delimiterChar,
});
}
);
/* We test here the internal management of the prvKey field of the
* DelimiterMaster class, in particular once it has been set to an entry
* key before to finally skip this entry because of an already present
* common prefix. */
it('should accept a version after skipping an object because of its ' +
test('should accept a version after skipping an object because of its ' +
'commonPrefix', () => {
const delimiterChar = '/';
const commonPrefix1 = `commonPrefix1${delimiterChar}`;
@ -409,7 +411,7 @@ describe('Delimiter All masters listing algorithm', () => {
});
});
it('should skip a versioned entry when there is a delimiter and the key ' +
test('should skip a versioned entry when there is a delimiter and the key ' +
'starts with the NextMarker value', () => {
const delimiterChar = '/';
const commonPrefix = `commonPrefix${delimiterChar}`;

View File

@ -284,7 +284,7 @@ const tests = [
];
describe('Delimiter All Versions listing algorithm', () => {
it('Should return good skipping value for DelimiterVersions', done => {
test('Should return good skipping value for DelimiterVersions', done => {
const delimiter = new DelimiterVersions({ delimiter: '/' });
for (let i = 0; i < 100; i++) {
delimiter.filter({ key: `foo/${zpad(i)}`, value: '{}' });
@ -293,13 +293,13 @@ describe('Delimiter All Versions listing algorithm', () => {
done();
});
tests.forEach(test => {
it(`Should list ${test.name}`, done => {
tests.forEach(t => {
test(`Should list ${t.name}`, done => {
// Simulate skip scan done by LevelDB
const d = dataVersioned.filter(e => test.filter(e, test.input));
const d = dataVersioned.filter(e => t.filter(e, t.input));
const res =
performListing(d, DelimiterVersions, test.input, logger);
assert.deepStrictEqual(res, test.output);
performListing(d, DelimiterVersions, t.input, logger);
assert.deepStrictEqual(res, t.output);
done();
});
});

View File

@ -16,7 +16,7 @@ describe('checkLimit function', () => {
{ input: [0, 0], output: 0 },
];
tests.forEach((test, index) => {
it(`test${index}`, done => {
test(`test${index}`, done => {
const res = checkLimit(test.input[0], test.input[1]);
assert.deepStrictEqual(res, test.output);
done();

View File

@ -23,11 +23,11 @@ const infoFromVault = {
const authInfo = new AuthInfo(infoFromVault);
describe('AuthInfo class constructor', () => {
it('should return an object', () => {
test('should return an object', () => {
assert.strictEqual(typeof authInfo, 'object');
});
it('should set properties', () => {
test('should set properties', () => {
assert.strictEqual(authInfo.arn, arn);
assert.strictEqual(authInfo.canonicalID, canonicalID);
assert.strictEqual(authInfo.shortid, shortid);
@ -36,51 +36,51 @@ describe('AuthInfo class constructor', () => {
assert.strictEqual(authInfo.IAMdisplayName, IAMdisplayName);
});
it('should have a working getArn() method', () => {
test('should have a working getArn() method', () => {
assert.strictEqual(authInfo.getArn(), arn);
});
it('should have a working getCanonicalID() method', () => {
test('should have a working getCanonicalID() method', () => {
assert.strictEqual(authInfo.getCanonicalID(), canonicalID);
});
it('should have a working getShortid() method', () => {
test('should have a working getShortid() method', () => {
assert.strictEqual(authInfo.getShortid(), shortid);
});
it('should have a working getEmail() method', () => {
test('should have a working getEmail() method', () => {
assert.strictEqual(authInfo.getEmail(), email);
});
it('should have a working getAccountDisplayName() method', () => {
test('should have a working getAccountDisplayName() method', () => {
assert.strictEqual(authInfo.getAccountDisplayName(),
accountDisplayName);
});
it('should have a working getIAMdisplayName() method', () => {
test('should have a working getIAMdisplayName() method', () => {
assert.strictEqual(authInfo.getIAMdisplayName(), IAMdisplayName);
});
it('should have a working isRequesterAnIAMUser() method', () => {
test('should have a working isRequesterAnIAMUser() method', () => {
assert.strictEqual(authInfo.isRequesterAnIAMUser(), true);
const accountUser = new AuthInfo({ canonicalID: 'account' });
assert.strictEqual(accountUser.isRequesterAnIAMUser(), false);
});
it('should have a working isRequesterPublicUser() method', () => {
test('should have a working isRequesterPublicUser() method', () => {
assert.strictEqual(authInfo.isRequesterPublicUser(), false);
const publicUser = new AuthInfo({ canonicalID: constants.publicId });
assert.strictEqual(publicUser.isRequesterPublicUser(), true);
});
it('should have a working isRequesterAServiceAccount() method', () => {
test('should have a working isRequesterAServiceAccount() method', () => {
assert.strictEqual(authInfo.isRequesterAServiceAccount(), false);
const serviceAccount = new AuthInfo({
canonicalID: `${constants.zenkoServiceAccount}/clueso` });
assert.strictEqual(serviceAccount.isRequesterAServiceAccount(), true);
});
it('should have a working isRequesterThisServiceAccount() method', () => {
test('should have a working isRequesterThisServiceAccount() method', () => {
const serviceAccount = new AuthInfo({
canonicalID: `${constants.zenkoServiceAccount}/clueso` });
assert.strictEqual(

View File

@ -78,20 +78,19 @@ describe('AuthLoader class', () => {
['accounts.0.canonicalID', 64],
['accounts.0.keys', 'not an Array'],
['accounts.0.keys', undefined],
].forEach(test => {
if (test[1] === undefined) {
].forEach(t => {
if (t[1] === undefined) {
// Check a failure when deleting required fields
it(`should fail when missing field ${test[0]}`, done => {
test(`should fail when missing field ${t[0]}`, done => {
should._exec = shouldFail;
should.missingField(obj, test[0], done);
should.missingField(obj, t[0], done);
});
} else {
// Check a failure when the type of field is different than
// expected
it(`should fail when modified field ${test[0]} ${test[1]}`,
done => {
test(`should fail when modified field ${t[0]} ${t[1]}`, done => {
should._exec = shouldFail;
should.modifiedField(obj, test[0], test[1], done);
should.modifiedField(obj, t[0], t[1], done);
});
}
});
@ -103,30 +102,30 @@ describe('AuthLoader class', () => {
[
'accounts.0.keys',
'accounts.0.users',
].forEach(test => {
].forEach(t => {
// Check a success when deleting optional fields
it(`should return success when missing field ${test}`, done => {
test(`should return success when missing field ${t}`, done => {
should._exec = shouldSucceed;
should.missingField(obj, test[0], done);
should.missingField(obj, t[0], done);
});
});
it('Should return error on two same canonicalID', done => {
test('Should return error on two same canonicalID', done => {
obj.accounts[0].canonicalID = obj.accounts[1].canonicalID;
shouldFail(obj, done);
});
it('Should return error on two same emails', done => {
test('Should return error on two same emails', done => {
obj.accounts[0].email = obj.accounts[1].email;
shouldFail(obj, done);
});
it('Should return error on two same arn', done => {
test('Should return error on two same arn', done => {
obj.accounts[0].arn = obj.accounts[1].arn;
shouldFail(obj, done);
});
it('Should return error on two same access key', done => {
test('Should return error on two same access key', done => {
obj.accounts[0].keys[0].access = obj.accounts[1].keys[0].access;
shouldFail(obj, done);
});

View File

@ -17,7 +17,7 @@ const searchEmail2 = 'sampleaccount4@sampling.com';
const expectCanId2 = 'newCanId';
describe('S3 in_memory auth backend', () => {
it('should find an account', done => {
test('should find an account', done => {
const backend = new Backend(JSON.parse(JSON.stringify(ref)));
backend.getCanonicalIds([searchEmail], log, (err, res) => {
assert.strictEqual(res.message.body[searchEmail],
@ -26,7 +26,7 @@ describe('S3 in_memory auth backend', () => {
});
});
it('should clear old account authdata on refresh', done => {
test('should clear old account authdata on refresh', done => {
const backend = new Backend(JSON.parse(JSON.stringify(ref)));
backend.refreshAuthData(obj2);
backend.getCanonicalIds([searchEmail], log, (err, res) => {
@ -35,7 +35,7 @@ describe('S3 in_memory auth backend', () => {
});
});
it('should add new account authdata on refresh', done => {
test('should add new account authdata on refresh', done => {
const backend = new Backend(JSON.parse(JSON.stringify(ref)));
backend.refreshAuthData(obj2);
backend.getCanonicalIds([searchEmail2], log, (err, res) => {

View File

@ -14,28 +14,28 @@ describe('S3 AuthData Indexer', () => {
done();
});
it('Should return account from canonicalID', done => {
test('Should return account from canonicalID', done => {
const res = index.getEntityByCanId(obj.accounts[0].canonicalID);
assert.strictEqual(typeof res, 'object');
assert.strictEqual(res.arn, obj.accounts[0].arn);
done();
});
it('Should return account from email', done => {
test('Should return account from email', done => {
const res = index.getEntityByEmail(obj.accounts[1].email);
assert.strictEqual(typeof res, 'object');
assert.strictEqual(res.canonicalID, obj.accounts[1].canonicalID);
done();
});
it('Should return account from key', done => {
test('Should return account from key', done => {
const res = index.getEntityByKey(obj.accounts[0].keys[0].access);
assert.strictEqual(typeof res, 'object');
assert.strictEqual(res.arn, obj.accounts[0].arn);
done();
});
it('should index account without keys', done => {
test('should index account without keys', done => {
should._exec = () => {
index = new Indexer(obj);
const res = index.getEntityByEmail(obj.accounts[0].email);
@ -46,7 +46,7 @@ describe('S3 AuthData Indexer', () => {
should.missingField(obj, 'accounts.0.keys');
});
it('should index account without users', done => {
test('should index account without users', done => {
should._exec = () => {
index = new Indexer(obj);
const res = index.getEntityByEmail(obj.accounts[0].email);

View File

@ -13,30 +13,32 @@ const gcpCanonicalizedResource = request =>
getCanonicalizedResource(request, 'GCP');
describe('canonicalization', () => {
it('should construct a canonicalized header in the correct order for AWS',
() => {
const headers = {
'date': 'Mon, 21 Sep 2015 22:29:27 GMT',
'x-amz-request-payer': 'requester',
'x-amz-meta-meta': 'something very meta',
'x-amz-meta-bits': '0',
'x-amz-meta-blksize': '2097152',
'x-amz-meta-compress': '0',
'authorization': 'AWS accessKey1:V8g5UJUFmMzruMqUHVT6ZwvUw+M=',
'host': 's3.amazonaws.com:80',
'connection': 'Keep-Alive',
'user-agent': 'Cyberduck/4.7.2.18004 (Mac OS X/10.10.5) (x86_64)',
};
const canonicalizedHeader = getCanonicalizedAmzHeaders(headers);
assert.strictEqual(canonicalizedHeader,
'x-amz-meta-bits:0\n' +
'x-amz-meta-blksize:2097152\n' +
'x-amz-meta-compress:0\n' +
'x-amz-meta-meta:something very meta\n' +
'x-amz-request-payer:requester\n');
});
test(
'should construct a canonicalized header in the correct order for AWS',
() => {
const headers = {
'date': 'Mon, 21 Sep 2015 22:29:27 GMT',
'x-amz-request-payer': 'requester',
'x-amz-meta-meta': 'something very meta',
'x-amz-meta-bits': '0',
'x-amz-meta-blksize': '2097152',
'x-amz-meta-compress': '0',
'authorization': 'AWS accessKey1:V8g5UJUFmMzruMqUHVT6ZwvUw+M=',
'host': 's3.amazonaws.com:80',
'connection': 'Keep-Alive',
'user-agent': 'Cyberduck/4.7.2.18004 (Mac OS X/10.10.5) (x86_64)',
};
const canonicalizedHeader = getCanonicalizedAmzHeaders(headers);
assert.strictEqual(canonicalizedHeader,
'x-amz-meta-bits:0\n' +
'x-amz-meta-blksize:2097152\n' +
'x-amz-meta-compress:0\n' +
'x-amz-meta-meta:something very meta\n' +
'x-amz-request-payer:requester\n');
}
);
it('should return an empty string as the canonicalized ' +
test('should return an empty string as the canonicalized ' +
'header if no amz headers', () => {
const headers = {
'date': 'Mon, 21 Sep 2015 22:29:27 GMT',
@ -49,7 +51,7 @@ describe('canonicalization', () => {
assert.strictEqual(canonicalizedHeader, '');
});
it('should construct a canonicalized resource for AWS', () => {
test('should construct a canonicalized resource for AWS', () => {
const request = {
headers: { host: 'bucket.s3.amazonaws.com:80' },
url: '/obj',
@ -65,7 +67,7 @@ describe('canonicalization', () => {
'/bucket/obj?requestPayment=yes,please');
});
it('should return the path as the canonicalized resource ' +
test('should return the path as the canonicalized resource ' +
'if no bucket name, overriding headers or delete query for AWS', () => {
const request = {
headers: { host: 's3.amazonaws.com:80' },
@ -76,7 +78,7 @@ describe('canonicalization', () => {
assert.strictEqual(canonicalizedResource, '/');
});
it('should sort the subresources (included query params) in ' +
test('should sort the subresources (included query params) in ' +
'lexicographical order for AWS', () => {
const request = {
headers: { host: 's3.amazonaws.com:80' },
@ -91,30 +93,32 @@ describe('canonicalization', () => {
'/?partNumber=5&uploadId=iamanuploadid');
});
it('should construct a canonicalized header in the correct order for GCP',
() => {
const headers = {
'date': 'Mon, 21 Sep 2015 22:29:27 GMT',
'x-goog-request-payer': 'requester',
'x-goog-meta-meta': 'something very meta',
'x-goog-meta-bits': '0',
'x-goog-meta-blksize': '2097152',
'x-goog-meta-compress': '0',
'authorization': 'GOOG1 accessKey1:V8g5UJUFmMzruMqUHVT6ZwvUw+M=',
'host': 's3.amazonaws.com:80',
'connection': 'Keep-Alive',
'user-agent': 'Cyberduck/4.7.2.18004 (Mac OS X/10.10.5) (x86_64)',
};
const canonicalizedHeader = getCanonicalizedGcpHeaders(headers);
assert.strictEqual(canonicalizedHeader,
'x-goog-meta-bits:0\n' +
'x-goog-meta-blksize:2097152\n' +
'x-goog-meta-compress:0\n' +
'x-goog-meta-meta:something very meta\n' +
'x-goog-request-payer:requester\n');
});
test(
'should construct a canonicalized header in the correct order for GCP',
() => {
const headers = {
'date': 'Mon, 21 Sep 2015 22:29:27 GMT',
'x-goog-request-payer': 'requester',
'x-goog-meta-meta': 'something very meta',
'x-goog-meta-bits': '0',
'x-goog-meta-blksize': '2097152',
'x-goog-meta-compress': '0',
'authorization': 'GOOG1 accessKey1:V8g5UJUFmMzruMqUHVT6ZwvUw+M=',
'host': 's3.amazonaws.com:80',
'connection': 'Keep-Alive',
'user-agent': 'Cyberduck/4.7.2.18004 (Mac OS X/10.10.5) (x86_64)',
};
const canonicalizedHeader = getCanonicalizedGcpHeaders(headers);
assert.strictEqual(canonicalizedHeader,
'x-goog-meta-bits:0\n' +
'x-goog-meta-blksize:2097152\n' +
'x-goog-meta-compress:0\n' +
'x-goog-meta-meta:something very meta\n' +
'x-goog-request-payer:requester\n');
}
);
it('should return an empty string as the canonicalized ' +
test('should return an empty string as the canonicalized ' +
'header if no goog headers', () => {
const headers = {
'date': 'Mon, 21 Sep 2015 22:29:27 GMT',
@ -127,7 +131,7 @@ describe('canonicalization', () => {
assert.strictEqual(canonicalizedHeader, '');
});
it('should construct a canonicalized resource for GCP', () => {
test('should construct a canonicalized resource for GCP', () => {
const request = {
headers: { host: 'bucket.storage.googapis.com:80' },
url: '/obj',
@ -143,7 +147,7 @@ describe('canonicalization', () => {
'/bucket/obj?billing=yes,please');
});
it('should return the path as the canonicalized resource ' +
test('should return the path as the canonicalized resource ' +
'if no bucket name, overriding headers or delete query for GCP', () => {
const request = {
headers: { host: 'storage.googleapis.com:80' },
@ -155,7 +159,7 @@ describe('canonicalization', () => {
});
it('should sort the subresources (included query params) in ' +
test('should sort the subresources (included query params) in ' +
'lexicographical order for GCP', () => {
const request = {
headers: { host: 'storage.googleapis.com:80' },

View File

@ -10,28 +10,28 @@ const errors = require('../../../../index').errors;
const log = new DummyRequestLogger();
describe('checkTimestamp for timecheck in header auth', () => {
it('should return AccessDenied error if the date in the ' +
test('should return AccessDenied error if the date in the ' +
'header is before epochTime', () => {
const timestamp = new Date('1950-01-01');
const timeoutResult = checkRequestExpiry(timestamp, log);
assert.deepStrictEqual(timeoutResult, errors.AccessDenied);
});
it('should return RequestTimeTooSkewed error if the date in the ' +
test('should return RequestTimeTooSkewed error if the date in the ' +
'header is more than 15 minutes old', () => {
const timestamp = new Date(Date.now() - 16 * 60000);
const timeoutResult = checkRequestExpiry(timestamp, log);
assert.deepStrictEqual(timeoutResult, errors.RequestTimeTooSkewed);
});
it('should return RequestTimeTooSkewed error if the date in ' +
test('should return RequestTimeTooSkewed error if the date in ' +
'the header is more than 15 minutes in the future', () => {
const timestamp = new Date(Date.now() + 16 * 60000);
const timeoutResult = checkRequestExpiry(timestamp, log);
assert.deepStrictEqual(timeoutResult, errors.RequestTimeTooSkewed);
});
it('should return no error if the date in the header is ' +
test('should return no error if the date in the header is ' +
'within 15 minutes of current time', () => {
const timestamp = new Date();
const timeoutResult = checkRequestExpiry(timestamp, log);

View File

@ -9,7 +9,7 @@ const DummyRequestLogger = require('../../helpers').DummyRequestLogger;
const log = new DummyRequestLogger();
describe('v2 constructStringToSign function', () => {
it('should construct a stringToSign with query params treated ' +
test('should construct a stringToSign with query params treated ' +
'like headers (e.g. x-amz-acl) for AWS', () => {
const request = {
url: '/noderocks/cuteotter.jpeg?AWSAccessKeyId' +
@ -45,7 +45,7 @@ describe('v2 constructStringToSign function', () => {
assert.strictEqual(actualOutput, expectedOutput);
});
it('should construct a stringToSign with query params treated ' +
test('should construct a stringToSign with query params treated ' +
'like headers (e.g. x-goog-acl) for GCP', () => {
const request = {
url: '/noderocks/cuteotter.jpeg?AWSAccessKeyId' +

View File

@ -11,14 +11,14 @@ describe('v2: headerAuthCheck', () => {
{ token: undefined, error: false },
{ token: 'invalid-token', error: true },
{ token: 'a'.repeat(128), error: false },
].forEach(test => it(`test with token(${test.token})`, () => {
].forEach(t => test(`test with token(${t.token})`, () => {
const request = {
headers: {
'x-amz-security-token': test.token,
'x-amz-security-token': t.token,
},
};
const res = headerAuthCheck(request, log, {});
if (test.error) {
if (t.error) {
assert.notStrictEqual(res.err, undefined);
assert.strictEqual(res.err.InvalidToken, true);
} else {

View File

@ -13,7 +13,7 @@ const RequestContext =
const logger = new DummyRequestLogger();
describe('Public Access', () => {
it('should grant access to a user that provides absolutely' +
test('should grant access to a user that provides absolutely' +
'no authentication information and should assign that user the ' +
'All Users Group accessKey', done => {
const request = {
@ -38,7 +38,7 @@ describe('Public Access', () => {
}, 's3', requestContext);
});
it('should not grant access to a request that contains ' +
test('should not grant access to a request that contains ' +
'an authorization header without proper credentials', done => {
const request = {
method: 'GET',

View File

@ -11,13 +11,13 @@ describe('v2: queryAuthCheck', () => {
{ token: undefined, error: false },
{ token: 'invalid-token', error: true },
{ token: 'a'.repeat(128), error: false },
].forEach(test => it(`test with token(${test.token})`, () => {
].forEach(t => test(`test with token(${t.token})`, () => {
const request = { method: 'GET' };
const data = {
SecurityToken: test.token,
SecurityToken: t.token,
};
const res = queryAuthCheck(request, log, data);
if (test.error) {
if (t.error) {
assert.notStrictEqual(res.err, undefined);
assert.strictEqual(res.err.InvalidToken, true);
} else {

View File

@ -14,7 +14,7 @@ const gcpConstructStringToSign = (request, query, log) =>
constructStringToSign(request, query, log, 'GCP');
describe('checkAuth reconstruction of signature', () => {
it('should reconstruct the signature for a ' +
test('should reconstruct the signature for a ' +
'GET request from s3-curl for AWS', () => {
// Based on s3-curl run
const request = {
@ -35,7 +35,7 @@ describe('checkAuth reconstruction of signature', () => {
assert.strictEqual(reconstructedSig, 'MJNF7AqNapSu32TlBOVkcAxj58c=');
});
it('should reconstruct the signature for a GET request from ' +
test('should reconstruct the signature for a GET request from ' +
'CyberDuck for AWS', () => {
// Based on CyberDuck request
const request = {
@ -58,7 +58,7 @@ describe('checkAuth reconstruction of signature', () => {
assert.strictEqual(reconstructedSig, 'V8g5UJUFmMzruMqUHVT6ZwvUw+M=');
});
it('should reconstruct the signature for a PUT request from ' +
test('should reconstruct the signature for a PUT request from ' +
's3cmd for AWS', () => {
// Based on s3cmd run
const request = {
@ -86,7 +86,7 @@ describe('checkAuth reconstruction of signature', () => {
assert.strictEqual(reconstructedSig, 'fWPcicKn7Fhzfje/0pRTifCxL44=');
});
it('should reconstruct the signature for a ' +
test('should reconstruct the signature for a ' +
'GET request from s3-curl for GCP', () => {
// Based on s3-curl run
const request = {
@ -109,7 +109,7 @@ describe('checkAuth reconstruction of signature', () => {
assert.strictEqual(reconstructedSig, 'MJNF7AqNapSu32TlBOVkcAxj58c=');
});
it('should reconstruct the signature for a GET request from ' +
test('should reconstruct the signature for a GET request from ' +
'CyberDuck for GCP', () => {
// Based on CyberDuck request
const request = {
@ -134,7 +134,7 @@ describe('checkAuth reconstruction of signature', () => {
assert.strictEqual(reconstructedSig, 'bdcnXSDhpN0lR2NBUlayg4vmMDU=');
});
it('should reconstruct the signature for a PUT request from ' +
test('should reconstruct the signature for a PUT request from ' +
's3cmd for GCP', () => {
// Based on s3cmd run
const request = {

View File

@ -8,7 +8,7 @@ const awsURIencode = require('../../../../lib/auth/v4/awsURIencode');
// AWS.util.uriEscapePath and AWS.util.uriEscape functions
// (see aws-sdk lib/signers/v4.js)
describe('should URIencode in accordance with AWS rules', () => {
it('should not encode / if give false argument', () => {
test('should not encode / if give false argument', () => {
const input1 = '/s3amazonaws.com/?$*@whateverASFEFWE()@)(*#@+ )';
const expectedOutput1 = '/s3amazonaws.com/%3F%24%2A%40whatever' +
'ASFEFWE%28%29%40%29%28%2A%23%40%2B%20%20%29';
@ -24,7 +24,7 @@ describe('should URIencode in accordance with AWS rules', () => {
assert.strictEqual(actualOutput2, expectedOutput2);
});
it('should encode / if no second argument given', () => {
test('should encode / if no second argument given', () => {
const input1 = '/s3amazonaws.com/?$*@whateverASFEFWE()@)(*#@+ )';
const expectedOutput1 = '%2Fs3amazonaws.com%2F%3F%24%2A%40whatever' +
'ASFEFWE%28%29%40%29%28%2A%23%40%2B%20%20%29';
@ -40,7 +40,7 @@ describe('should URIencode in accordance with AWS rules', () => {
assert.strictEqual(actualOutput2, expectedOutput2);
});
it('should encode native language characters', () => {
test('should encode native language characters', () => {
const input = '/s3amazonaws.com/Pâtisserie=中文-español-English' +
'-हिन्दी-العربية-português-বাংলা-русский-日本語-ਪੰਜਾਬੀ-한국어-தமிழ்';
const expectedOutput = '%2Fs3amazonaws.com%2FP%C3%A2tisserie%3D%E4' +

View File

@ -14,7 +14,7 @@ const log = new DummyRequestLogger();
describe(item.desc, () => {
// Example taken from: http://docs.aws.amazon.com/AmazonS3/
// latest/API/sig-v4-header-based-auth.html
it('should construct a stringToSign in accordance ' +
test('should construct a stringToSign in accordance ' +
'with AWS rules for a get object request (header auth)', () => {
const path = '/test.txt';
const params = {
@ -55,7 +55,7 @@ const log = new DummyRequestLogger();
// Example taken from: http://docs.aws.amazon.com/AmazonS3/
// latest/API/sig-v4-header-based-auth.html
it('should construct a stringToSign in accordance ' +
test('should construct a stringToSign in accordance ' +
'with AWS rules for a put object request (header auth)', () => {
const path = '/test$file.text';
const params = {
@ -98,40 +98,39 @@ const log = new DummyRequestLogger();
// Example taken from: http://docs.aws.amazon.com/AmazonS3/
// latest/API/sig-v4-header-based-auth.html
it('should construct a stringToSign in accordance ' +
'with AWS rules for a pre-signed get url request (query auth)',
() => {
const path = '/test.txt';
const params = {
request: {
method: 'GET',
path: `${item.path}${path}`,
headers: {
host: 'examplebucket.s3.amazonaws.com',
},
test('should construct a stringToSign in accordance ' +
'with AWS rules for a pre-signed get url request (query auth)', () => {
const path = '/test.txt';
const params = {
request: {
method: 'GET',
path: `${item.path}${path}`,
headers: {
host: 'examplebucket.s3.amazonaws.com',
},
query: {
'X-Amz-Algorithm': 'AWS4-HMAC-SHA256',
'X-Amz-Credential': 'AKIAIOSFODNN7EXAMPLE/20130524/' +
'us-east-1/s3/aws4_request',
'X-Amz-Date': '20130524T000000Z',
'X-Amz-Expires': '86400',
'X-Amz-SignedHeaders': 'host',
},
signedHeaders: 'host',
payloadChecksum: 'UNSIGNED-PAYLOAD',
credentialScope: '20130524/us-east-1/s3/aws4_request',
timestamp: '20130524T000000Z',
log,
proxyPath: item.path ? path : undefined,
};
const expectedOutput = 'AWS4-HMAC-SHA256\n' +
'20130524T000000Z\n' +
'20130524/us-east-1/s3/aws4_request\n' +
'3bfa292879f6447bbcda7001decf97f4a54d' +
'c650c8942174ae0a9121cf58ad04';
const actualOutput = constructStringToSign(params);
assert.strictEqual(actualOutput, expectedOutput);
});
},
query: {
'X-Amz-Algorithm': 'AWS4-HMAC-SHA256',
'X-Amz-Credential': 'AKIAIOSFODNN7EXAMPLE/20130524/' +
'us-east-1/s3/aws4_request',
'X-Amz-Date': '20130524T000000Z',
'X-Amz-Expires': '86400',
'X-Amz-SignedHeaders': 'host',
},
signedHeaders: 'host',
payloadChecksum: 'UNSIGNED-PAYLOAD',
credentialScope: '20130524/us-east-1/s3/aws4_request',
timestamp: '20130524T000000Z',
log,
proxyPath: item.path ? path : undefined,
};
const expectedOutput = 'AWS4-HMAC-SHA256\n' +
'20130524T000000Z\n' +
'20130524/us-east-1/s3/aws4_request\n' +
'3bfa292879f6447bbcda7001decf97f4a54d' +
'c650c8942174ae0a9121cf58ad04';
const actualOutput = constructStringToSign(params);
assert.strictEqual(actualOutput, expectedOutput);
});
});
});

View File

@ -9,7 +9,7 @@ const createCanonicalRequest =
describe('createCanonicalRequest function', () => {
// Example taken from: http://docs.aws.amazon.com/AmazonS3/
// latest/API/sig-v4-header-based-auth.html
it('should construct a canonical request in accordance ' +
test('should construct a canonical request in accordance ' +
'with AWS rules for a get object request (header auth)', () => {
const params = {
pHttpVerb: 'GET',
@ -48,7 +48,7 @@ describe('createCanonicalRequest function', () => {
const msg = 'S3C-820: aws java sdk should not encode * ' +
'character for signature';
it(msg, () => {
test(msg, () => {
const doc = JSON.stringify({
Statement: [{
Action: 's3:*',
@ -92,7 +92,7 @@ describe('createCanonicalRequest function', () => {
// Example taken from: http://docs.aws.amazon.com/AmazonS3/
// latest/API/sig-v4-header-based-auth.html
it('should construct a canonical request in accordance ' +
test('should construct a canonical request in accordance ' +
'with AWS rules for a put object request (header auth)', () => {
const params = {
pHttpVerb: 'PUT',
@ -134,7 +134,7 @@ describe('createCanonicalRequest function', () => {
// Example taken from: http://docs.aws.amazon.com/AmazonS3/latest/API/
// sigv4-query-string-auth.html
it('should construct a canonical request in accordance ' +
test('should construct a canonical request in accordance ' +
'with AWS rules for a pre-signed get url request (query auth)', () => {
const params = {
pHttpVerb: 'GET',
@ -167,7 +167,7 @@ describe('createCanonicalRequest function', () => {
});
it('should construct a canonical request that contains upper and ' +
test('should construct a canonical request that contains upper and ' +
'lower case query params and query params treated like headers ' +
'(x-amz-acl)', () => {
const params = {
@ -208,7 +208,7 @@ describe('createCanonicalRequest function', () => {
assert.strictEqual(actualOutput, expectedOutput);
});
it('should construct a canonical request that contains a ' +
test('should construct a canonical request that contains a ' +
'signed header with an empty string value', () => {
const params = {
pHttpVerb: 'PUT',
@ -231,7 +231,7 @@ describe('createCanonicalRequest function', () => {
assert.strictEqual(actualOutput, expectedOutput);
});
it('should construct a canonical request that contains a ' +
test('should construct a canonical request that contains a ' +
'signed expect header even if expect header value was ' +
'stripped by the load balancer', () => {
const params = {
@ -254,7 +254,7 @@ describe('createCanonicalRequest function', () => {
assert.strictEqual(actualOutput, expectedOutput);
});
it('should trim white space in a canonical header value so that ' +
test('should trim white space in a canonical header value so that ' +
'there is no white space before or after a value and any sequential ' +
'white space becomes a single space', () => {
const params = {

View File

@ -40,11 +40,11 @@ describe('v4 headerAuthCheck', () => {
{ token: undefined, error: false },
{ token: 'invalid-token', error: true },
{ token: 'a'.repeat(128), error: false },
].forEach(test => it(`test with token(${test.token})`, () => {
].forEach(t => test(`test with token(${t.token})`, () => {
const alteredRequest = createAlteredRequest({
'x-amz-security-token': test.token }, 'headers', request, headers);
'x-amz-security-token': t.token }, 'headers', request, headers);
const res = headerAuthCheck(alteredRequest, log);
if (test.error) {
if (t.error) {
assert.notStrictEqual(res.err, undefined);
assert.strictEqual(res.err.InvalidToken, true);
} else {
@ -52,7 +52,7 @@ describe('v4 headerAuthCheck', () => {
}
}));
it('should return error if undefined authorization header', done => {
test('should return error if undefined authorization header', done => {
const alteredRequest = createAlteredRequest({
authorization: undefined }, 'headers', request, headers);
const res = headerAuthCheck(alteredRequest, log);
@ -60,7 +60,7 @@ describe('v4 headerAuthCheck', () => {
done();
});
it('should return error if undefined sha256 header', done => {
test('should return error if undefined sha256 header', done => {
const alteredRequest = createAlteredRequest({
'x-amz-content-sha256': undefined }, 'headers', request, headers);
const res = headerAuthCheck(alteredRequest, log);
@ -68,7 +68,7 @@ describe('v4 headerAuthCheck', () => {
done();
});
it('should return error if host is not included as signed header', done => {
test('should return error if host is not included as signed header', done => {
const alteredRequest = createAlteredRequest({
authorization: 'AWS4-HMAC-SHA256 Credential=accessKey1/20160208' +
'/us-east-1/s3/aws4_request, ' +
@ -81,7 +81,7 @@ describe('v4 headerAuthCheck', () => {
done();
});
it('should return error if an x-amz header is not included as signed ' +
test('should return error if an x-amz header is not included as signed ' +
'header but is in request', done => {
const alteredRequest = createAlteredRequest({
'x-amz-acl': 'public' }, 'headers', request, headers);
@ -90,7 +90,7 @@ describe('v4 headerAuthCheck', () => {
done();
});
it('should return error if an x-scal header is not included as signed ' +
test('should return error if an x-scal header is not included as signed ' +
'header but is in request', done => {
const alteredRequest = createAlteredRequest({
'x-scal-encryption': 'true' }, 'headers', request, headers);
@ -99,7 +99,7 @@ describe('v4 headerAuthCheck', () => {
done();
});
it('should return error if missing credentials', done => {
test('should return error if missing credentials', done => {
const alteredRequest = createAlteredRequest({
authorization: 'AWS4-HMAC-SHA256 SignedHeaders=host;' +
'x-amz-content-sha256;x-amz-date, Signature=abed9' +
@ -110,7 +110,7 @@ describe('v4 headerAuthCheck', () => {
done();
});
it('should return error if missing SignedHeaders', done => {
test('should return error if missing SignedHeaders', done => {
// 'Sigheaders' instead of SignedHeaders in authorization
const alteredRequest = createAlteredRequest({
authorization: 'AWS4-HMAC-SHA256 Credential=accessKey1' +
@ -124,7 +124,7 @@ describe('v4 headerAuthCheck', () => {
done();
});
it('should return error if missing Signature', done => {
test('should return error if missing Signature', done => {
// Sig instead of 'Signature' in authorization
const alteredRequest = createAlteredRequest({
authorization: 'AWS4-HMAC-SHA256 Credential=accessKey1' +
@ -138,7 +138,7 @@ describe('v4 headerAuthCheck', () => {
done();
});
it('should return error if missing timestamp', done => {
test('should return error if missing timestamp', done => {
const alteredRequest = createAlteredRequest({
'x-amz-date': undefined }, 'headers', request, headers);
const res = headerAuthCheck(alteredRequest, log);
@ -148,7 +148,7 @@ describe('v4 headerAuthCheck', () => {
done();
});
it('should return error if scope date does not ' +
test('should return error if scope date does not ' +
'match timestamp date', done => {
// Different timestamp (2015 instead of 2016)
const alteredRequest = createAlteredRequest({
@ -158,7 +158,7 @@ describe('v4 headerAuthCheck', () => {
done();
});
it('should return error if timestamp from x-amz-date header' +
test('should return error if timestamp from x-amz-date header' +
'is before epochTime', done => {
// Different date (2095 instead of 2016)
const alteredRequest = createAlteredRequest({
@ -176,7 +176,7 @@ describe('v4 headerAuthCheck', () => {
done();
});
it('should return error if timestamp from x-amz-date header' +
test('should return error if timestamp from x-amz-date header' +
'is in the future', done => {
// Different date (2095 instead of 2016)
const alteredRequest = createAlteredRequest({
@ -192,7 +192,7 @@ describe('v4 headerAuthCheck', () => {
done();
});
it('should return error if timestamp from date header' +
test('should return error if timestamp from date header' +
' is in the future (and there is no x-amz-date header)', done => {
const alteredRequest = createAlteredRequest({
date: 'Tue, 08 Feb 2095 20:14:05 GMT',
@ -209,7 +209,7 @@ describe('v4 headerAuthCheck', () => {
done();
});
it('should return error if timestamp from x-amz-date header' +
test('should return error if timestamp from x-amz-date header' +
'is too old', done => {
// Different scope date and x-amz-date (2015 instead of 2016)
const alteredRequest = createAlteredRequest({
@ -226,7 +226,7 @@ describe('v4 headerAuthCheck', () => {
done();
});
it('should return error if timestamp from date header' +
test('should return error if timestamp from date header' +
'is too old (and there is no x-amz-date header)', done => {
// Different scope date (2015 instead of 2016) and date in 2015
const alteredRequest = createAlteredRequest({
@ -244,7 +244,7 @@ describe('v4 headerAuthCheck', () => {
done();
});
it('should not return error due to unknown region', done => {
test('should not return error due to unknown region', done => {
// Returning an error causes an issue for certain clients.
const alteredRequest = createAlteredRequest({
authorization: 'AWS4-HMAC-SHA256 Credential=accessKey1/20160208' +
@ -260,7 +260,7 @@ describe('v4 headerAuthCheck', () => {
done();
});
it('should successfully return v4 and no error', done => {
test('should successfully return v4 and no error', done => {
// Freezes time so date created within function will be Feb 8, 2016
const clock = lolex.install(1454962445000);
const res = headerAuthCheck(request, log);

View File

@ -35,7 +35,7 @@ const request = {
};
describe('v4 queryAuthCheck', () => {
it('should return error if algorithm param incorrect', done => {
test('should return error if algorithm param incorrect', done => {
const alteredRequest = createAlteredRequest({ 'X-Amz-Algorithm':
'AWS4-HMAC-SHA1' }, 'query', request, query);
const res = queryAuthCheck(alteredRequest, log, alteredRequest.query);
@ -43,7 +43,7 @@ describe('v4 queryAuthCheck', () => {
done();
});
it('should return error if X-Amz-Credential param is undefined', done => {
test('should return error if X-Amz-Credential param is undefined', done => {
const alteredRequest = createAlteredRequest({ 'X-Amz-Credential':
undefined }, 'query', request, query);
const res = queryAuthCheck(alteredRequest, log, alteredRequest.query);
@ -51,7 +51,7 @@ describe('v4 queryAuthCheck', () => {
done();
});
it('should return error if credential param format incorrect', done => {
test('should return error if credential param format incorrect', done => {
const alteredRequest = createAlteredRequest({ 'X-Amz-Credential':
'incorrectformat' }, 'query', request, query);
const res = queryAuthCheck(alteredRequest, log, alteredRequest.query);
@ -59,7 +59,7 @@ describe('v4 queryAuthCheck', () => {
done();
});
it('should return error if service set forth in ' +
test('should return error if service set forth in ' +
'credential param is not s3', done => {
const alteredRequest = createAlteredRequest({ 'X-Amz-Credential':
'accessKey1/20160208/us-east-1/EC2/aws4_request' },
@ -69,7 +69,7 @@ describe('v4 queryAuthCheck', () => {
done();
});
it('should return error if requestType set forth in ' +
test('should return error if requestType set forth in ' +
'credential param is not aws4_request', done => {
const alteredRequest = createAlteredRequest({ 'X-Amz-Credential':
'accessKey1/20160208/us-east-1/s3/aws2_request' },
@ -79,7 +79,7 @@ describe('v4 queryAuthCheck', () => {
done();
});
it('should return error if undefined X-Amz-SignedHeaders param', done => {
test('should return error if undefined X-Amz-SignedHeaders param', done => {
const alteredRequest = createAlteredRequest({ 'X-Amz-SignedHeaders':
undefined }, 'query', request, query);
const res = queryAuthCheck(alteredRequest, log, alteredRequest.query);
@ -87,7 +87,7 @@ describe('v4 queryAuthCheck', () => {
done();
});
it('should return error if undefined X-Amz-Signature param', done => {
test('should return error if undefined X-Amz-Signature param', done => {
const alteredRequest = createAlteredRequest({ 'X-Amz-Signature':
undefined }, 'query', request, query);
const res = queryAuthCheck(alteredRequest, log, alteredRequest.query);
@ -95,7 +95,7 @@ describe('v4 queryAuthCheck', () => {
done();
});
it('should return error if host is not included as signed header', done => {
test('should return error if host is not included as signed header', done => {
const alteredRequest = createAlteredRequest({ 'X-Amz-SignedHeaders':
'none' }, 'query', request, query);
const res = queryAuthCheck(alteredRequest, log, alteredRequest.query);
@ -103,7 +103,7 @@ describe('v4 queryAuthCheck', () => {
done();
});
it('should return error if an x-amz header is not included as signed ' +
test('should return error if an x-amz header is not included as signed ' +
'header but is in request', done => {
const alteredRequest = createAlteredRequest({
'x-amz-acl': 'public' }, 'headers', request, headers);
@ -112,7 +112,7 @@ describe('v4 queryAuthCheck', () => {
done();
});
it('should return error if an x-scal header is not included as signed ' +
test('should return error if an x-scal header is not included as signed ' +
'header but is in request', done => {
const alteredRequest = createAlteredRequest({
'x-scal-encryption': 'true' }, 'headers', request, headers);
@ -121,7 +121,7 @@ describe('v4 queryAuthCheck', () => {
done();
});
it('should return error if undefined X-Amz-Date param', done => {
test('should return error if undefined X-Amz-Date param', done => {
const alteredRequest = createAlteredRequest({ 'X-Amz-Date':
undefined }, 'query', request, query);
const res = queryAuthCheck(alteredRequest, log, alteredRequest.query);
@ -129,7 +129,7 @@ describe('v4 queryAuthCheck', () => {
done();
});
it('should return error if undefined X-Amz-Expires param', done => {
test('should return error if undefined X-Amz-Expires param', done => {
const alteredRequest = createAlteredRequest({ 'X-Amz-Expires':
undefined }, 'query', request, query);
const res = queryAuthCheck(alteredRequest, log, alteredRequest.query);
@ -137,7 +137,7 @@ describe('v4 queryAuthCheck', () => {
done();
});
it('should return error if X-Amz-Expires param ' +
test('should return error if X-Amz-Expires param ' +
'is less than 1', done => {
const alteredRequest = createAlteredRequest({ 'X-Amz-Expires':
0 }, 'query', request, query);
@ -146,7 +146,7 @@ describe('v4 queryAuthCheck', () => {
done();
});
it('should return error if X-Amz-Expires param ' +
test('should return error if X-Amz-Expires param ' +
'is greater than 604800', done => {
// Greater than 604800 seconds (7 days)
const alteredRequest = createAlteredRequest({ 'X-Amz-Expires':
@ -156,7 +156,7 @@ describe('v4 queryAuthCheck', () => {
done();
});
it('should return error if X-Amz-Date param is in the future', done => {
test('should return error if X-Amz-Date param is in the future', done => {
// 2095 instead of 2016
const alteredRequest = createAlteredRequest({
'X-Amz-Date': '20950208T234304Z',
@ -167,7 +167,7 @@ describe('v4 queryAuthCheck', () => {
done();
});
it('should return error if X-Amz-Date param is too old', done => {
test('should return error if X-Amz-Date param is too old', done => {
const alteredRequest = createAlteredRequest({
'X-Amz-Date': '20160208T234304Z',
}, 'query', request, query);
@ -176,7 +176,7 @@ describe('v4 queryAuthCheck', () => {
done();
});
it('should return error if scope date from X-Amz-Credential param ' +
test('should return error if scope date from X-Amz-Credential param ' +
'does not match date from X-Amz-Date param', done => {
const clock = lolex.install(1454974984001);
const alteredRequest = createAlteredRequest({
@ -189,7 +189,7 @@ describe('v4 queryAuthCheck', () => {
done();
});
it('should successfully return v4 and no error', done => {
test('should successfully return v4 and no error', done => {
// Freezes time so date created within function will be Feb 8, 2016
// (within 15 minutes of timestamp in request)
const clock = lolex.install(1454974984001);
@ -200,7 +200,7 @@ describe('v4 queryAuthCheck', () => {
done();
});
it('should successfully return v4 and no error if X-Amz-Expires param ' +
test('should successfully return v4 and no error if X-Amz-Expires param ' +
'is 604800 (7 days)', done => {
// Freezes time so date created within function will be Feb 8, 2016
const clock = lolex.install(1454974984001);
@ -213,7 +213,7 @@ describe('v4 queryAuthCheck', () => {
done();
});
it('should successfully return v4 and no error if X-Amz-Expires param ' +
test('should successfully return v4 and no error if X-Amz-Expires param ' +
'is less than 604800 (7 days)', done => {
// Freezes time so date created within function will be Feb 8, 2016
const clock = lolex.install(1454974984001);

View File

@ -7,7 +7,7 @@ const calculateSigningKey =
.calculateSigningKey;
describe('v4 signing key calculation', () => {
it('should calculate a signing key in accordance with AWS rules', () => {
test('should calculate a signing key in accordance with AWS rules', () => {
const secretKey = 'verySecretKey1';
const region = 'us-east-1';
const scopeDate = '20160209';

View File

@ -14,7 +14,7 @@ const DummyRequestLogger = require('../../helpers').DummyRequestLogger;
const log = new DummyRequestLogger();
describe('convertAmzTimeToMs function', () => {
it('should convert ISO8601Timestamp format without ' +
test('should convert ISO8601Timestamp format without ' +
'dashes or colons, e.g. 20160202T220410Z to milliseconds since ' +
'Unix epoch', () => {
const input = '20160202T220410Z';
@ -25,7 +25,7 @@ describe('convertAmzTimeToMs function', () => {
});
describe('convertUTCtoISO8601 function', () => {
it('should convert UTC timestamp to ISO8601 timestamp', () => {
test('should convert UTC timestamp to ISO8601 timestamp', () => {
const input = 'Sun, 08 Feb 2015 20:14:05 GMT';
const expectedOutput = '20150208T201405Z';
const actualOutput = convertUTCtoISO8601(input);
@ -35,17 +35,17 @@ describe('convertUTCtoISO8601 function', () => {
describe('checkTimeSkew function', () => {
let clock;
before(() => {
beforeAll(() => {
// Time is 2016-03-17T18:22:01.033Z
clock = lolex.install(1458238921033);
});
after(() => {
afterAll(() => {
clock.uninstall();
});
// Our default expiry for header auth check is 15 minutes (in secs)
const expiry = (15 * 60);
it('should allow requests with timestamps under 15 minutes ' +
test('should allow requests with timestamps under 15 minutes ' +
'in the future', () => {
const timestamp14MinInFuture = '20160317T183601033Z';
const expectedOutput = false;
@ -54,7 +54,7 @@ describe('checkTimeSkew function', () => {
assert.strictEqual(actualOutput, expectedOutput);
});
it('should not allow requests with timestamps more than 15 minutes ' +
test('should not allow requests with timestamps more than 15 minutes ' +
'in the future', () => {
const timestamp16MinInFuture = '20160317T183801033Z';
const expectedOutput = true;
@ -63,7 +63,7 @@ describe('checkTimeSkew function', () => {
assert.strictEqual(actualOutput, expectedOutput);
});
it('should allow requests with timestamps earlier than the ' +
test('should allow requests with timestamps earlier than the ' +
'expiry', () => {
const timestamp14MinInPast = '20160317T180801033Z';
const expectedOutput = false;
@ -72,7 +72,7 @@ describe('checkTimeSkew function', () => {
assert.strictEqual(actualOutput, expectedOutput);
});
it('should not allow requests with timestamps later ' +
test('should not allow requests with timestamps later ' +
'than the expiry', () => {
const timestamp16MinInPast = '20160317T180601033Z';
const expectedOutput = true;

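The hunks above swap mocha's before/after hooks for Jest's beforeAll/afterAll while keeping lolex for clock control. A minimal sketch of the migrated pattern, assuming the Jest globals and the lolex call signature already used in this suite:

    const assert = require('assert');
    const lolex = require('lolex');

    let clock;
    beforeAll(() => {
        // freeze time at 2016-03-17T18:22:01.033Z, as in the suite above
        clock = lolex.install(1458238921033);
    });
    afterAll(() => {
        clock.uninstall();
    });
    test('sees the frozen clock', () => {
        // with the fake timers installed, Date.now() returns the frozen value
        assert.strictEqual(Date.now(), 1458238921033);
    });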
View File

@ -40,7 +40,7 @@ function checkValueNotInDb(db, k, done) {
}
describe('IndexTransaction', () => {
it('should allow put', done => {
test('should allow put', done => {
const db = createDb();
const transaction = new IndexTransaction(db);
@ -59,7 +59,7 @@ describe('IndexTransaction', () => {
});
});
it('should allow del', done => {
test('should allow del', done => {
const db = createDb();
const transaction = new IndexTransaction(db);
@ -83,7 +83,7 @@ describe('IndexTransaction', () => {
});
});
it('should commit put and del combined', done => {
test('should commit put and del combined', done => {
const db = createDb();
const transaction = new IndexTransaction(db);
@ -124,7 +124,7 @@ describe('IndexTransaction', () => {
.write(commitTransactionAndCheck);
});
it('should refuse types other than del and put', done => {
test('should refuse types other than del and put', done => {
const transaction = new IndexTransaction();
function tryPush() {
@ -147,7 +147,7 @@ describe('IndexTransaction', () => {
assert.throws(tryPush, validateError);
});
it('should refuse put without key', done => {
test('should refuse put without key', done => {
const transaction = new IndexTransaction();
function tryPush() {
@ -169,7 +169,7 @@ describe('IndexTransaction', () => {
assert.throws(tryPush, validateError);
});
it('should refuse del without key', done => {
test('should refuse del without key', done => {
const transaction = new IndexTransaction();
function tryPush() {
@ -190,7 +190,7 @@ describe('IndexTransaction', () => {
assert.throws(tryPush, validateError);
});
it('should refuse put without value', done => {
test('should refuse put without value', done => {
const transaction = new IndexTransaction();
function tryPush() {
@ -212,7 +212,7 @@ describe('IndexTransaction', () => {
assert.throws(tryPush, validateError);
});
it('should refuse to commit without any ops', done => {
test('should refuse to commit without any ops', done => {
const transaction = new IndexTransaction();
transaction.commit(err => {
@ -224,7 +224,7 @@ describe('IndexTransaction', () => {
});
});
it('should refuse to commit twice', done => {
test('should refuse to commit twice', done => {
const transaction = new IndexTransaction(createDb());
transaction.push({
@ -250,7 +250,7 @@ describe('IndexTransaction', () => {
transaction.commit(tryCommitAgain);
});
it('should refuse to add an op if already committed', done => {
test('should refuse to add an op if already committed', done => {
const transaction = new IndexTransaction(createDb());
function push() {
@ -282,7 +282,7 @@ describe('IndexTransaction', () => {
transaction.commit(tryPushAgain);
});
it('should have a working put shortcut method', done => {
test('should have a working put shortcut method', done => {
const db = createDb();
const transaction = new IndexTransaction(db);
@ -297,7 +297,7 @@ describe('IndexTransaction', () => {
});
});
it('should have a working del shortcut method', done => {
test('should have a working del shortcut method', done => {
const db = createDb();
const transaction = new IndexTransaction(db);

View File

@ -5,7 +5,7 @@ const errors = require('../../index').errors;
describe('Errors: ', () => {
Object.keys(errors).forEach(index => {
it(`should return an instance of ${index} Error`, done => {
test(`should return an instance of ${index} Error`, done => {
assert.strictEqual(errors[index] instanceof Error, true,
'should be an instance of Error');
assert.strictEqual(errors[index].code, errorsJSON[index].code,
@ -18,7 +18,7 @@ describe('Errors: ', () => {
});
});
it('should allow custom error descriptions', () => {
test('should allow custom error descriptions', () => {
const originDescription = errors.NoSuchEntity.description;
const error =
errors.NoSuchEntity.customizeDescription('custom-description');

View File

@ -26,52 +26,74 @@ function cidrListMatchCheck(cidrList, ip, expectedRes) {
}
describe('Parse IP address', () => {
it('should parse IPv4 address',
() => parseValidIpCheck('192.168.1.1', ipaddr.IPv4));
test(
'should parse IPv4 address',
() => parseValidIpCheck('192.168.1.1', ipaddr.IPv4)
);
it('should parse IPv6 address',
() => parseValidIpCheck('2001:cdba::3257:9652', ipaddr.IPv6));
test(
'should parse IPv6 address',
() => parseValidIpCheck('2001:cdba::3257:9652', ipaddr.IPv6)
);
it('should parse IPv4 mapped IPv6 address',
test(
'should parse IPv4 mapped IPv6 address',
// ::ffff:c0a8:101 mapped for 192.168.1.1
() => parseValidIpCheck('::ffff:c0a8:101', ipaddr.IPv4));
() => parseValidIpCheck('::ffff:c0a8:101', ipaddr.IPv4)
);
['260.384.2.1', 'INVALID', '', null, undefined].forEach(item => {
it(`should return empty object for invalid IP address: (${item})`,
() => parseInvalidIpCheck(item));
test(
`should return empty object for invalid IP address: (${item})`,
() => parseInvalidIpCheck(item)
);
});
});
describe('Check IP matches CIDR range', () => {
it('should match IP in a range',
() => cidrMatchCheck('192.168.1.0/24', '192.168.1.1', true));
test(
'should match IP in a range',
() => cidrMatchCheck('192.168.1.0/24', '192.168.1.1', true)
);
it('should not match IP not in a range',
() => cidrMatchCheck('192.168.1.0/24', '127.0.0.1', false));
test(
'should not match IP not in a range',
() => cidrMatchCheck('192.168.1.0/24', '127.0.0.1', false)
);
it('should match if range equals IP',
() => cidrMatchCheck('192.168.1.1', '192.168.1.1', true));
test(
'should match if range equals IP',
() => cidrMatchCheck('192.168.1.1', '192.168.1.1', true)
);
['260.384.2.1', 'INVALID', '', null, undefined].forEach(item => {
it(`should not match for invalid IP: (${item})`,
() => cidrMatchCheck('192.168.1.0/24', item, false));
test(
`should not match for invalid IP: (${item})`,
() => cidrMatchCheck('192.168.1.0/24', item, false)
);
});
});
describe('Check IP matches a list of CIDR ranges', () => {
it('should match IP in a valid range',
test(
'should match IP in a valid range',
() => cidrListMatchCheck(['192.168.1.0/24', '192.168.100.14/24',
'2001:db8::'], '192.168.100.1', true));
'2001:db8::'], '192.168.100.1', true)
);
[
[['127.0.0.1'], '127.0.0.2'],
[['192.168.1.1'], '192.168.1.1'],
].forEach(item =>
it(`should match IP ${item[0][0]} without CIDR range`,
() => cidrListMatchCheck(item[0], item[1], true))
test(
`should match IP ${item[0][0]} without CIDR range`,
() => cidrListMatchCheck(item[0], item[1], true)
)
);
it('should not range match if CIDR range is not provided',
() => cidrListMatchCheck(['192.168.1.1'], '192.168.1.3', false));
test(
'should not range match if CIDR range is not provided',
() => cidrListMatchCheck(['192.168.1.1'], '192.168.1.3', false)
);
});
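For reference, the CIDR checks these tests exercise reduce to the ipaddr.js calls below; this is a usage sketch of that library's documented API, not the helper under test:

    const ipaddr = require('ipaddr.js');

    const addr = ipaddr.parse('192.168.1.1');
    // parseCIDR returns an [address, prefixLength] pair that match() accepts
    const inRange = addr.match(ipaddr.parseCIDR('192.168.1.0/24'));  // true
    const outOfRange = addr.match(ipaddr.parseCIDR('10.0.0.0/8'));   // false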

View File

@ -5,27 +5,26 @@ const jsutil = require('../../index').jsutil;
describe('JSUtil', () => {
describe('once', () => {
it('should call the wrapped function only once when invoked ' +
'multiple times',
done => {
let value = 42;
let value2 = 51;
test('should call the wrapped function only once when invoked ' +
'multiple times', done => {
let value = 42;
let value2 = 51;
const wrapOnce = jsutil.once(expectArg => {
assert.strictEqual(expectArg, 'foo');
value += 1;
return value;
});
const wrapOnce2 = jsutil.once(expectArg => {
assert.strictEqual(expectArg, 'foo2');
value2 += 1;
return value2;
});
assert.strictEqual(wrapOnce('foo'), 43);
assert.strictEqual(wrapOnce2('foo2'), 52);
assert.strictEqual(wrapOnce('bar'), 43);
assert.strictEqual(wrapOnce2('bar2'), 52);
done();
});
const wrapOnce = jsutil.once(expectArg => {
assert.strictEqual(expectArg, 'foo');
value += 1;
return value;
});
const wrapOnce2 = jsutil.once(expectArg => {
assert.strictEqual(expectArg, 'foo2');
value2 += 1;
return value2;
});
assert.strictEqual(wrapOnce('foo'), 43);
assert.strictEqual(wrapOnce2('foo2'), 52);
assert.strictEqual(wrapOnce('bar'), 43);
assert.strictEqual(wrapOnce2('bar2'), 52);
done();
});
});
});
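The behaviour asserted above (a single underlying call, a cached return value, later arguments ignored) corresponds to a wrapper along these lines; an illustrative sketch, not jsutil's actual implementation:

    function once(fn) {
        let called = false;
        let result;
        return function onceWrapper(...args) {
            if (!called) {
                called = true;
                result = fn(...args);
            }
            // later calls return the first result and never re-run fn
            return result;
        };
    }

    const wrapped = once(x => x + 1);
    wrapped(41); // 42
    wrapped(99); // still 42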

View File

@ -30,7 +30,7 @@ describe('Matrix', () => {
}).execute();
describe('Check if matrix was well generated', () => {
it('Has generated 40 matrices', done => {
test('Has generated 40 matrices', done => {
const callback = () => {
if (numberOfCall === 40) {
done();
@ -66,7 +66,7 @@ describe('Matrix', () => {
}).execute();
describe('Check if matrix was well generated', () => {
it('Should be called 20 times per auth value', done => {
test('Should be called 20 times per auth value', done => {
const callback = () => {
if (numberOfCallV2 === 20 && numberOfCallV4 === 20) {
done();
@ -107,7 +107,7 @@ describe('Matrix', () => {
}).execute();
describe('Check if matrix was well generated', () => {
it('All exceptions were called', done => {
test('All exceptions were called', done => {
const callback = () => {
if (callAbcd === true && callUndefined === true) {
done();
@ -150,7 +150,7 @@ describe('Matrix', () => {
}).execute();
describe('Check if matrix was well generated', () => {
it('All exceptions were called', done => {
test('All exceptions were called', done => {
const callback = () => {
if (callAbcd === true && callUndefined === true) {
done();
@ -182,7 +182,7 @@ describe('Matrix', () => {
}, 'should generate matrix').execute();
describe('Check if matrix was well generated', () => {
it('Has been called', done => {
test('Has been called', done => {
const callback = () => {
if (hasBeenCalled === true) {
done();
@ -207,7 +207,7 @@ describe('Matrix', () => {
} catch (e) {
anExceptionWasFound = true;
}
it('An exception was thrown', done => {
test('An exception was thrown', done => {
assert.equal(anExceptionWasFound, true);
done();
});
@ -226,7 +226,7 @@ describe('Matrix', () => {
} catch (e) {
anExceptionWasFound = true;
}
it('An exception was thrown', done => {
test('An exception was thrown', done => {
assert.equal(anExceptionWasFound, true);
done();
});

View File

@ -28,31 +28,33 @@ describe('StatsClient class', () => {
afterEach(() => redisClient.clear(() => {}));
it('should correctly record a new request by default one increment',
done => {
async.series([
next => {
statsClient.reportNewRequest(id, (err, res) => {
assert.ifError(err);
test(
'should correctly record a new request by default one increment',
done => {
async.series([
next => {
statsClient.reportNewRequest(id, (err, res) => {
assert.ifError(err);
const expected = [[null, 1], [null, 1]];
assert.deepEqual(res, expected);
next();
});
},
next => {
statsClient.reportNewRequest(id, (err, res) => {
assert.ifError(err);
const expected = [[null, 1], [null, 1]];
assert.deepEqual(res, expected);
next();
});
},
next => {
statsClient.reportNewRequest(id, (err, res) => {
assert.ifError(err);
const expected = [[null, 2], [null, 1]];
assert.deepEqual(res, expected);
next();
});
},
], done);
});
const expected = [[null, 2], [null, 1]];
assert.deepEqual(res, expected);
next();
});
},
], done);
}
);
it('should record new requests by defined amount increments', done => {
test('should record new requests by defined amount increments', done => {
function noop() {}
async.series([
@ -86,7 +88,7 @@ describe('StatsClient class', () => {
], done);
});
it('should correctly record a 500 on the server', done => {
test('should correctly record a 500 on the server', done => {
statsClient.report500(id, (err, res) => {
assert.ifError(err);
@ -96,7 +98,7 @@ describe('StatsClient class', () => {
});
});
it('should respond back with total requests', done => {
test('should respond back with total requests', done => {
async.series([
next => {
statsClient.reportNewRequest(id, err => {

View File

@ -108,7 +108,7 @@ describe('ARN object model', () => {
isIAMUser: false,
isIAMRole: false,
},
].forEach(arnTest => it(`should accept ARN "${arnTest.arn}"`, () => {
].forEach(arnTest => test(`should accept ARN "${arnTest.arn}"`, () => {
const arnObj = ARN.createFromString(arnTest.arn);
assert(arnObj instanceof ARN);
assert.strictEqual(arnObj.getService(), arnTest.service);
@ -128,7 +128,7 @@ describe('ARN object model', () => {
'arn:aws:xxx::123456789012:role/backbeat',
'arn:aws:s3::123456789012345:role/backbeat',
'arn:aws:s3::12345678901b:role/backbeat',
].forEach(arn => it(`should fail with invalid ARN "${arn}"`, () => {
].forEach(arn => test(`should fail with invalid ARN "${arn}"`, () => {
const res = ARN.createFromString(arn);
assert.notStrictEqual(res.error, undefined);
}));

View File

@ -136,7 +136,7 @@ Object.keys(acl).forEach(
describe('serialize/deSerialize on BucketInfo class', () => {
const serialized = dummyBucket.serialize();
it('should serialize', done => {
test('should serialize', done => {
assert.strictEqual(typeof serialized, 'string');
const bucketInfos = {
acl: dummyBucket._acl,
@ -163,7 +163,7 @@ Object.keys(acl).forEach(
done();
});
it('should deSerialize into an instance of BucketInfo', done => {
test('should deSerialize into an instance of BucketInfo', done => {
const serialized = dummyBucket.serialize();
const deSerialized = BucketInfo.deSerialize(serialized);
assert.strictEqual(typeof deSerialized, 'object');
@ -174,16 +174,15 @@ Object.keys(acl).forEach(
});
describe('constructor', () => {
it('this should have the right BucketInfo types',
() => {
assert.strictEqual(typeof dummyBucket.getName(), 'string');
assert.strictEqual(typeof dummyBucket.getOwner(), 'string');
assert.strictEqual(typeof dummyBucket.getOwnerDisplayName(),
'string');
assert.strictEqual(typeof dummyBucket.getCreationDate(),
'string');
});
it('this should have the right acl\'s types', () => {
test('this should have the right BucketInfo types', () => {
assert.strictEqual(typeof dummyBucket.getName(), 'string');
assert.strictEqual(typeof dummyBucket.getOwner(), 'string');
assert.strictEqual(typeof dummyBucket.getOwnerDisplayName(),
'string');
assert.strictEqual(typeof dummyBucket.getCreationDate(),
'string');
});
test('this should have the right acl\'s types', () => {
assert.strictEqual(typeof dummyBucket.getAcl(), 'object');
assert.strictEqual(
typeof dummyBucket.getAcl().Canned, 'string');
@ -193,11 +192,11 @@ Object.keys(acl).forEach(
assert(Array.isArray(dummyBucket.getAcl().READ));
assert(Array.isArray(dummyBucket.getAcl().READ_ACP));
});
it('this should have the right acls', () => {
test('this should have the right acls', () => {
assert.deepStrictEqual(dummyBucket.getAcl(),
acl[aclObj] || emptyAcl);
});
it('this should have the right website config types', () => {
test('this should have the right website config types', () => {
const websiteConfig = dummyBucket.getWebsiteConfiguration();
assert.strictEqual(typeof websiteConfig, 'object');
assert.strictEqual(typeof websiteConfig._indexDocument,
@ -206,7 +205,7 @@ Object.keys(acl).forEach(
'string');
assert(Array.isArray(websiteConfig._routingRules));
});
it('this should have the right cors config types', () => {
test('this should have the right cors config types', () => {
const cors = dummyBucket.getCors();
assert(Array.isArray(cors));
assert(Array.isArray(cors[0].allowedMethods));
@ -220,62 +219,61 @@ Object.keys(acl).forEach(
});
describe('getters on BucketInfo class', () => {
it('getAcl should return the acl', () => {
test('getAcl should return the acl', () => {
assert.deepStrictEqual(dummyBucket.getAcl(),
acl[aclObj] || emptyAcl);
});
it('getName should return name', () => {
test('getName should return name', () => {
assert.deepStrictEqual(dummyBucket.getName(), bucketName);
});
it('getOwner should return owner', () => {
test('getOwner should return owner', () => {
assert.deepStrictEqual(dummyBucket.getOwner(), owner);
});
it('getOwnerDisplayName should return ownerDisplayName', () => {
test('getOwnerDisplayName should return ownerDisplayName', () => {
assert.deepStrictEqual(dummyBucket.getOwnerDisplayName(),
ownerDisplayName);
});
it('getCreationDate should return creationDate', () => {
test('getCreationDate should return creationDate', () => {
assert.deepStrictEqual(dummyBucket.getCreationDate(), testDate);
});
it('getVersioningConfiguration should return configuration', () => {
test('getVersioningConfiguration should return configuration', () => {
assert.deepStrictEqual(dummyBucket.getVersioningConfiguration(),
testVersioningConfiguration);
});
it('getWebsiteConfiguration should return configuration', () => {
test('getWebsiteConfiguration should return configuration', () => {
assert.deepStrictEqual(dummyBucket.getWebsiteConfiguration(),
testWebsiteConfiguration);
});
it('getLocationConstraint should return locationConstraint', () => {
test('getLocationConstraint should return locationConstraint', () => {
assert.deepStrictEqual(dummyBucket.getLocationConstraint(),
testLocationConstraint);
});
it('getCors should return CORS configuration', () => {
test('getCors should return CORS configuration', () => {
assert.deepStrictEqual(dummyBucket.getCors(),
testCorsConfiguration);
});
it('getLifeCycleConfiguration should return configuration', () => {
test('getLifeCycleConfiguration should return configuration', () => {
assert.deepStrictEqual(dummyBucket.getLifecycleConfiguration(),
testLifecycleConfiguration);
});
});
describe('setters on BucketInfo class', () => {
it('setCannedAcl should set acl.Canned', () => {
test('setCannedAcl should set acl.Canned', () => {
const testAclCanned = 'public-read';
dummyBucket.setCannedAcl(testAclCanned);
assert.deepStrictEqual(
dummyBucket.getAcl().Canned, testAclCanned);
});
it('setSpecificAcl should set the acl of a specified bucket',
() => {
const typeOfGrant = 'WRITE';
dummyBucket.setSpecificAcl(owner, typeOfGrant);
const lastIndex =
dummyBucket.getAcl()[typeOfGrant].length - 1;
assert.deepStrictEqual(
dummyBucket.getAcl()[typeOfGrant][lastIndex], owner);
});
it('setFullAcl should set full set of ACLs', () => {
test('setSpecificAcl should set the acl of a specified bucket', () => {
const typeOfGrant = 'WRITE';
dummyBucket.setSpecificAcl(owner, typeOfGrant);
const lastIndex =
dummyBucket.getAcl()[typeOfGrant].length - 1;
assert.deepStrictEqual(
dummyBucket.getAcl()[typeOfGrant][lastIndex], owner);
});
test('setFullAcl should set full set of ACLs', () => {
const newACLs = {
Canned: '',
FULL_CONTROL: ['someOtherAccount'],
@ -290,30 +288,29 @@ Object.keys(acl).forEach(
assert.deepStrictEqual(dummyBucket.getAcl().WRITE_ACP,
['yetAnotherAccount']);
});
it('setName should set the bucket name', () => {
test('setName should set the bucket name', () => {
const newName = 'newName';
dummyBucket.setName(newName);
assert.deepStrictEqual(dummyBucket.getName(), newName);
});
it('setOwner should set the owner', () => {
test('setOwner should set the owner', () => {
const newOwner = 'newOwner';
dummyBucket.setOwner(newOwner);
assert.deepStrictEqual(dummyBucket.getOwner(), newOwner);
});
it('setOwnerDisplayName should set ownerDisplayName', () => {
test('setOwnerDisplayName should set ownerDisplayName', () => {
const newOwnerDisplayName = 'newOwnerDisplayName';
dummyBucket.setOwnerDisplayName(newOwnerDisplayName);
assert.deepStrictEqual(dummyBucket.getOwnerDisplayName(),
newOwnerDisplayName);
});
it('setLocationConstraint should set the locationConstraint',
() => {
const newLocation = 'newLocation';
dummyBucket.setLocationConstraint(newLocation);
assert.deepStrictEqual(
dummyBucket.getLocationConstraint(), newLocation);
});
it('setVersioningConfiguration should set configuration', () => {
test('setLocationConstraint should set the locationConstraint', () => {
const newLocation = 'newLocation';
dummyBucket.setLocationConstraint(newLocation);
assert.deepStrictEqual(
dummyBucket.getLocationConstraint(), newLocation);
});
test('setVersioningConfiguration should set configuration', () => {
const newVersioningConfiguration =
{ Status: 'Enabled', MfaDelete: 'Enabled' };
dummyBucket
@ -321,7 +318,7 @@ Object.keys(acl).forEach(
assert.deepStrictEqual(dummyBucket.getVersioningConfiguration(),
newVersioningConfiguration);
});
it('setWebsiteConfiguration should set configuration', () => {
test('setWebsiteConfiguration should set configuration', () => {
const newWebsiteConfiguration = {
redirectAllRequestsTo: {
hostName: 'www.example.com',
@ -333,14 +330,14 @@ Object.keys(acl).forEach(
assert.deepStrictEqual(dummyBucket.getWebsiteConfiguration(),
newWebsiteConfiguration);
});
it('setCors should set CORS configuration', () => {
test('setCors should set CORS configuration', () => {
const newCorsConfiguration =
[{ allowedMethods: ['PUT'], allowedOrigins: ['*'] }];
dummyBucket.setCors(newCorsConfiguration);
assert.deepStrictEqual(dummyBucket.getCors(),
newCorsConfiguration);
});
it('setReplicationConfiguration should set replication ' +
test('setReplicationConfiguration should set replication ' +
'configuration', () => {
const newReplicationConfig = {
Role: 'arn:aws:iam::123456789012:role/src-resource,' +
@ -357,7 +354,7 @@ Object.keys(acl).forEach(
};
dummyBucket.setReplicationConfiguration(newReplicationConfig);
});
it('setLifecycleConfiguration should set lifecycle ' +
test('setLifecycleConfiguration should set lifecycle ' +
'configuration', () => {
const newLifecycleConfig = {
rules: [

View File

@ -264,14 +264,13 @@ describe('LifecycleConfiguration class getLifecycleConfiguration', () => {
tagObj = {};
});
it('should return MalformedXML error if request xml is empty', done => {
test('should return MalformedXML error if request xml is empty', done => {
const errMessage = 'request xml is undefined or empty';
checkError('', 'MalformedXML', errMessage, done);
});
requiredTags.forEach(t => {
it(`should return ${t.error} error if ${t.tag} tag is missing`,
done => {
test(`should return ${t.error} error if ${t.tag} tag is missing`, done => {
generateParsedXml(t.tag, null, parsedXml => {
checkError(parsedXml, t.error, t.errMessage, done);
});
@ -280,8 +279,7 @@ describe('LifecycleConfiguration class getLifecycleConfiguration', () => {
notImplementedActions.forEach(action => {
const expError = 'NotImplemented';
it(`should return ${expError} error for ${action.tag} action`,
done => {
test(`should return ${expError} error for ${action.tag} action`, done => {
generateParsedXml('Action', action, parsedXml => {
checkError(parsedXml, expError, action.errMessage, done);
});
@ -289,8 +287,7 @@ describe('LifecycleConfiguration class getLifecycleConfiguration', () => {
});
invalidActions.forEach(a => {
it(`should return ${a.error} for ${a.label} action error`,
done => {
test(`should return ${a.error} for ${a.label} action error`, done => {
generateParsedXml('Action', a, parsedXml => {
checkError(parsedXml, a.error, a.errMessage, done);
});
@ -298,15 +295,17 @@ describe('LifecycleConfiguration class getLifecycleConfiguration', () => {
});
invalidFilters.forEach(filter => {
it(`should return ${filter.error} for ${filter.label} filter error`,
done => {
generateParsedXml('Filter', filter, parsedXml => {
checkError(parsedXml, filter.error, filter.errMessage, done);
});
});
test(
`should return ${filter.error} for ${filter.label} filter error`,
done => {
generateParsedXml('Filter', filter, parsedXml => {
checkError(parsedXml, filter.error, filter.errMessage, done);
});
}
);
});
it('should return MalformedXML error if invalid status', done => {
test('should return MalformedXML error if invalid status', done => {
tagObj.status = 'foo';
const errMessage = 'Status is not valid';
generateParsedXml('Status', tagObj, parsedXml => {
@ -314,7 +313,7 @@ describe('LifecycleConfiguration class getLifecycleConfiguration', () => {
});
});
it('should return InvalidRequest error if ID not unique', done => {
test('should return InvalidRequest error if ID not unique', done => {
tagObj.rule = 'not-unique-id';
const errMessage = 'Rule ID must be unique';
generateParsedXml('Rule', tagObj, parsedXml => {
@ -322,7 +321,7 @@ describe('LifecycleConfiguration class getLifecycleConfiguration', () => {
});
});
it('should return InvalidArgument error if invalid ID', done => {
test('should return InvalidArgument error if invalid ID', done => {
tagObj.id = 'a'.repeat(256);
const errMessage = 'Rule ID is greater than 255 characters long';
generateParsedXml('ID', tagObj, parsedXml => {
@ -330,7 +329,7 @@ describe('LifecycleConfiguration class getLifecycleConfiguration', () => {
});
});
it('should return MalformedXML error if over 1000 rules', done => {
test('should return MalformedXML error if over 1000 rules', done => {
tagObj.rule = 'too-many-rules';
const errMessage = 'request xml includes over max limit of 1000 rules';
generateParsedXml('Rule', tagObj, parsedXml => {
@ -338,7 +337,7 @@ describe('LifecycleConfiguration class getLifecycleConfiguration', () => {
});
});
it('should use last listed Prefix if multiple Prefixes included', done => {
test('should use last listed Prefix if multiple Prefixes included', done => {
tagObj.label = 'mult-prefixes';
tagObj.lastPrefix = 'coco';
generateParsedXml('Filter', tagObj, parsedXml => {

View File

@ -19,14 +19,14 @@ const testRoutingRuleParams = {
};
describe('RoutingRule class', () => {
it('should initialize even if no parameters are provided', done => {
test('should initialize even if no parameters are provided', done => {
const routingRule = new RoutingRule();
assert.strictEqual(routingRule._redirect, undefined);
assert.strictEqual(routingRule._condition, undefined);
done();
});
it('should return a new routing rule', done => {
test('should return a new routing rule', done => {
const routingRule = new RoutingRule(testRoutingRuleParams);
assert.deepStrictEqual(routingRule._redirect,
testRoutingRuleParams.redirect);
@ -35,14 +35,14 @@ describe('RoutingRule class', () => {
done();
});
it('getRedirect should fetch the instance\'s redirect', done => {
test('getRedirect should fetch the instance\'s redirect', done => {
const routingRule = new RoutingRule(testRoutingRuleParams);
assert.deepStrictEqual(routingRule.getRedirect(),
testRoutingRuleParams.redirect);
done();
});
it('getCondition should fetch the instance\'s condition', done => {
test('getCondition should fetch the instance\'s condition', done => {
const routingRule = new RoutingRule(testRoutingRuleParams);
assert.deepStrictEqual(routingRule.getCondition(),
testRoutingRuleParams.condition);
@ -51,7 +51,7 @@ describe('RoutingRule class', () => {
});
describe('WebsiteConfiguration class', () => {
it('should initialize even if no parameters are provided', done => {
test('should initialize even if no parameters are provided', done => {
const websiteConfig = new WebsiteConfiguration();
assert.strictEqual(websiteConfig._indexDocument, undefined);
assert.strictEqual(websiteConfig._errorDocument, undefined);
@ -60,7 +60,7 @@ describe('WebsiteConfiguration class', () => {
done();
});
it('should initialize indexDocument, errorDocument during construction ' +
test('should initialize indexDocument, errorDocument during construction ' +
'if provided in params', done => {
const testWebsiteConfigParams = {
indexDocument: 'index.html',
@ -72,7 +72,7 @@ describe('WebsiteConfiguration class', () => {
done();
});
it('should initialize redirectAllRequestsTo during construction if ' +
test('should initialize redirectAllRequestsTo during construction if ' +
'provided in params', done => {
const testWebsiteConfigParams = {
redirectAllRequestsTo: {
@ -88,7 +88,7 @@ describe('WebsiteConfiguration class', () => {
done();
});
it('should initialize routingRules properly during construction from ' +
test('should initialize routingRules properly during construction from ' +
'array of RoutingRule class instances', done => {
const testWebsiteConfigParams = {
routingRules: [],
@ -103,7 +103,7 @@ describe('WebsiteConfiguration class', () => {
done();
});
it('should initialize routingRules properly during construction from ' +
test('should initialize routingRules properly during construction from ' +
'array of plain objects', done => {
const testWebsiteConfigParams = {
routingRules: [],
@ -129,21 +129,21 @@ describe('WebsiteConfiguration class', () => {
});
describe('Getter/setter methods', () => {
it('for indexDocument should get/set indexDocument property', done => {
test('for indexDocument should get/set indexDocument property', done => {
const websiteConfig = new WebsiteConfiguration();
websiteConfig.setIndexDocument('index.html');
assert.strictEqual(websiteConfig.getIndexDocument(), 'index.html');
done();
});
it('for errorDocument should get/set errorDocument property', done => {
test('for errorDocument should get/set errorDocument property', done => {
const websiteConfig = new WebsiteConfiguration();
websiteConfig.setErrorDocument('error.html');
assert.strictEqual(websiteConfig.getErrorDocument(), 'error.html');
done();
});
it('for redirectAllRequestsTo should get/set redirectAllRequestsTo ' +
test('for redirectAllRequestsTo should get/set redirectAllRequestsTo ' +
'object', done => {
const websiteConfig = new WebsiteConfiguration();
const redirectAllRequestsTo = {
@ -156,7 +156,7 @@ describe('WebsiteConfiguration class', () => {
done();
});
it('for routingRules should get/set routingRules', done => {
test('for routingRules should get/set routingRules', done => {
const websiteConfig = new WebsiteConfiguration();
const routingRules = [testRoutingRuleParams];
websiteConfig.setRoutingRules(routingRules);
@ -169,7 +169,7 @@ describe('WebsiteConfiguration class', () => {
});
});
it('addRoutingRule should add a RoutingRule to routingRules', done => {
test('addRoutingRule should add a RoutingRule to routingRules', done => {
const websiteConfig = new WebsiteConfiguration();
websiteConfig.addRoutingRule(testRoutingRuleParams);
assert(Array.isArray(websiteConfig._routingRules));

View File

@ -99,12 +99,12 @@ describe('ObjectMD class setters/getters', () => {
dataStoreVersionId: '',
}],
['DataStoreName', null, ''],
].forEach(test => {
const property = test[0];
const testValue = test[1];
const defaultValue = test[2];
].forEach(t => {
const property = t[0];
const testValue = t[1];
const defaultValue = t[2];
const testName = testValue === null ? 'get default' : 'get/set';
it(`${testName}: ${property}`, () => {
test(`${testName}: ${property}`, () => {
if (testValue !== null) {
md[`set${property}`](testValue);
}
@ -120,7 +120,7 @@ describe('ObjectMD class setters/getters', () => {
});
});
it('ObjectMD::setReplicationSiteStatus', () => {
test('ObjectMD::setReplicationSiteStatus', () => {
md.setReplicationInfo({
backends: [{
site: 'zenko',
@ -136,7 +136,7 @@ describe('ObjectMD class setters/getters', () => {
}]);
});
it('ObjectMD::setReplicationBackends', () => {
test('ObjectMD::setReplicationBackends', () => {
md.setReplicationBackends([{
site: 'a',
status: 'b',
@ -149,12 +149,12 @@ describe('ObjectMD class setters/getters', () => {
}]);
});
it('ObjectMD::setReplicationStorageClass', () => {
test('ObjectMD::setReplicationStorageClass', () => {
md.setReplicationStorageClass('a');
assert.strictEqual(md.getReplicationStorageClass(), 'a');
});
it('ObjectMD::getReplicationSiteStatus', () => {
test('ObjectMD::getReplicationSiteStatus', () => {
md.setReplicationInfo({
backends: [{
site: 'zenko',
@ -165,7 +165,7 @@ describe('ObjectMD class setters/getters', () => {
assert.strictEqual(md.getReplicationSiteStatus('zenko'), 'PENDING');
});
it('ObjectMD::setReplicationSiteDataStoreVersionId', () => {
test('ObjectMD::setReplicationSiteDataStoreVersionId', () => {
md.setReplicationInfo({
backends: [{
site: 'zenko',
@ -181,7 +181,7 @@ describe('ObjectMD class setters/getters', () => {
}]);
});
it('ObjectMD::getReplicationSiteDataStoreVersionId', () => {
test('ObjectMD::getReplicationSiteDataStoreVersionId', () => {
md.setReplicationInfo({
backends: [{
site: 'zenko',
@ -195,7 +195,7 @@ describe('ObjectMD class setters/getters', () => {
});
describe('ObjectMD import from stored blob', () => {
it('should export and import correctly the latest model version', () => {
test('should export and import correctly the latest model version', () => {
const md = new ObjectMD();
const jsonMd = md.getSerialized();
const importedRes = ObjectMD.createFromBlob(jsonMd);
@ -204,7 +204,7 @@ describe('ObjectMD import from stored blob', () => {
assert.deepStrictEqual(md, importedMd);
});
it('should convert old location to new location', () => {
test('should convert old location to new location', () => {
const md = new ObjectMD();
const value = md.getValue();
value['md-model-version'] = 1;
@ -220,7 +220,7 @@ describe('ObjectMD import from stored blob', () => {
[{ key: 'stringLocation' }]);
});
it('should keep null location as is', () => {
test('should keep null location as is', () => {
const md = new ObjectMD();
const value = md.getValue();
value.location = null;
@ -234,7 +234,7 @@ describe('ObjectMD import from stored blob', () => {
assert.deepStrictEqual(importedMd.getValue().location, null);
});
it('should add dataStoreName attribute if missing', () => {
test('should add dataStoreName attribute if missing', () => {
const md = new ObjectMD();
const value = md.getValue();
value['md-model-version'] = 2;
@ -249,18 +249,20 @@ describe('ObjectMD import from stored blob', () => {
assert.notStrictEqual(valueImported.dataStoreName, undefined);
});
it('should return undefined for dataStoreVersionId if no object location',
() => {
const md = new ObjectMD();
const value = md.getValue();
const jsonMd = JSON.stringify(value);
const importedRes = ObjectMD.createFromBlob(jsonMd);
assert.strictEqual(importedRes.error, undefined);
const importedMd = importedRes.result;
assert.strictEqual(importedMd.getDataStoreVersionId(), undefined);
});
test(
'should return undefined for dataStoreVersionId if no object location',
() => {
const md = new ObjectMD();
const value = md.getValue();
const jsonMd = JSON.stringify(value);
const importedRes = ObjectMD.createFromBlob(jsonMd);
assert.strictEqual(importedRes.error, undefined);
const importedMd = importedRes.result;
assert.strictEqual(importedMd.getDataStoreVersionId(), undefined);
}
);
it('should get dataStoreVersionId if saved in object location', () => {
test('should get dataStoreVersionId if saved in object location', () => {
const md = new ObjectMD();
const dummyLocation = {
dataStoreVersionId: 'data-store-version-id',
@ -275,7 +277,7 @@ describe('ObjectMD import from stored blob', () => {
dummyLocation.dataStoreVersionId);
});
it('should return an error if blob is malformed JSON', () => {
test('should return an error if blob is malformed JSON', () => {
const importedRes = ObjectMD.createFromBlob('{BAD JSON}');
assert.notStrictEqual(importedRes.error, undefined);
assert.strictEqual(importedRes.result, undefined);
@ -283,7 +285,7 @@ describe('ObjectMD import from stored blob', () => {
});
describe('getAttributes static method', () => {
it('should return object metadata attributes', () => {
test('should return object metadata attributes', () => {
const attributes = ObjectMD.getAttributes();
const expectedResult = {
'owner-display-name': true,

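The import/export tests above all follow the same round-trip, using only calls that appear in this diff (ObjectMD is required from the library's models, as in the test file itself):

    const md = new ObjectMD();
    const blob = md.getSerialized();            // JSON string form
    const { error, result } = ObjectMD.createFromBlob(blob);
    // error is undefined on success; result is the re-hydrated ObjectMD,
    // so result.getValue() deep-equals md.getValue()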
View File

@ -22,7 +22,7 @@ describe('round robin hosts', () => {
defaultPort: 1002 });
});
it('should pick all hosts in turn', () => {
test('should pick all hosts in turn', () => {
const hostsPickCount = {
'1.2.3.0': 0,
'1.2.3.1': 0,
@ -39,7 +39,7 @@ describe('round robin hosts', () => {
assert.strictEqual(hostsPickCount['1.2.3.2'], 30);
});
it('should pick the same current host up to stickyCount ' +
test('should pick the same current host up to stickyCount ' +
'with pickHost()', () => {
const hostsPickCount = {
'1.2.3.0': 0,
@ -56,7 +56,7 @@ describe('round robin hosts', () => {
assert.strictEqual(hostsPickCount[curHost.host], 10);
});
it('should pick each host in turn with pickNextHost()', () => {
test('should pick each host in turn with pickNextHost()', () => {
const hostsPickCount = {
'1.2.3.0': 0,
'1.2.3.1': 0,
@ -73,7 +73,7 @@ describe('round robin hosts', () => {
assert.strictEqual(hostsPickCount['1.2.3.2'], 3);
});
it('should refuse if no valid host/port is given', () => {
test('should refuse if no valid host/port is given', () => {
assert.throws(() => new RoundRobin([]), Error);
assert.throws(() => new RoundRobin([{}]), Error);
assert.throws(() => new RoundRobin([
@ -100,7 +100,7 @@ describe('round robin hosts', () => {
new RoundRobin(['zenko.io', 'zenka.ia']);
});
it('should have set default port if not in bootstrap list', () => {
test('should have set default port if not in bootstrap list', () => {
// the current host should be picked 10 times in a row
const portMap = {
'1.2.3.0': 1000,

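The sticky-pick behaviour exercised above can be pictured with a small sketch; this illustrates the contract the tests assert, not the RoundRobin class itself:

    function makeStickyPicker(hosts, stickyCount) {
        let index = 0;
        let handedOut = 0;
        return function pickHost() {
            if (handedOut === stickyCount) {
                // advance to the next host only after stickyCount picks
                index = (index + 1) % hosts.length;
                handedOut = 0;
            }
            handedOut += 1;
            return hosts[index];
        };
    }

    const pick = makeStickyPicker(['1.2.3.0', '1.2.3.1', '1.2.3.2'], 10);
    // the first 10 calls return '1.2.3.0', the next 10 return '1.2.3.1', ...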
View File

@ -73,43 +73,42 @@ describe('network.Server: ', () => {
key: httpsRef.key,
ca: httpsRef.ca,
}],
].forEach(test => {
].forEach(t => {
function createServer() {
const ws = new Server(3000, log);
ws.setHttps(test[1].cert, test[1].key, test[1].ca, false);
ws.setHttps(t[1].cert, t[1].key, t[1].ca, false);
return ws;
}
describe(test[0], () => {
it('should start', done => {
describe(t[0], () => {
test('should start', done => {
const ws = createServer().onError(done).onListening(() => {
ws.onStop(done);
ws.stop();
}).start();
});
it('should return EADDRINUSE on binding port already taken',
done => {
const ws = createServer().onError(done)
.onListening(() => {
const bindingTimeout = setTimeout(() => {
const err =
'Server does not send a binding error';
ws.onStop(() => done(new Error(err))).stop();
}, 5000);
const ws2 = new Server(3000, log).onError(err => {
if (err.code === 'EADDRINUSE') {
clearTimeout(bindingTimeout);
return ws.onStop(done).stop();
}
test('should return EADDRINUSE on binding port already taken', done => {
const ws = createServer().onError(done)
.onListening(() => {
const bindingTimeout = setTimeout(() => {
const err =
'Server does not send a binding error';
ws.onStop(() => done(new Error(err))).stop();
}, 5000);
const ws2 = new Server(3000, log).onError(err => {
if (err.code === 'EADDRINUSE') {
clearTimeout(bindingTimeout);
return ws.onStop(() => done(err)).stop();
});
ws2.start();
}).start();
});
return ws.onStop(done).stop();
}
clearTimeout(bindingTimeout);
return ws.onStop(() => done(err)).stop();
});
ws2.start();
}).start();
});
it('should return InternalError when no request handler', done => {
test('should return InternalError when no request handler', done => {
const ws = createServer().onError(done).onListening(() => {
const requestTimeout = setTimeout(() => {
ws.onStop(() => done('No response received')).stop();
@ -130,7 +129,7 @@ describe('network.Server: ', () => {
}).start();
});
it('should return 200 OK with "done" as content', done => {
test('should return 200 OK with "done" as content', done => {
const ws = createServer().onError(done).onListening(() => {
const requestTimeout = setTimeout(() => {
ws.onStop(() => done('No response received')).stop();
@ -155,7 +154,7 @@ describe('network.Server: ', () => {
});
});
it('should fail when the server is twoWay', done => {
test('should fail when the server is twoWay', done => {
const ws = new Server(3000, log);
ws.setHttps(httpsRef.cert, httpsRef.key, httpsRef.ca, true);
ws.onError(done).onListening(() => {

View File

@ -26,8 +26,7 @@ describe('parseRangeSpec function', () => {
].forEach(testCase => {
const { rangeHeader, expectedRangeSpec } = testCase;
it(`should return ${expectedRangeSpec} on range "${rangeHeader}"`,
() => {
test(`should return ${expectedRangeSpec} on range "${rangeHeader}"`, () => {
const rangeSpec = parseRangeSpec(rangeHeader);
if (expectedRangeSpec.error) {
assert(rangeSpec.error);
@ -67,7 +66,7 @@ describe('getByteRangeFromSpec function', () => {
].forEach(testCase => {
const { rangeSpec, objectSize, expectedByteRange } = testCase;
it(`should transform spec ${rangeSpec} with object size ` +
test(`should transform spec ${rangeSpec} with object size ` +
`${objectSize} to byte range ${expectedByteRange}`, () => {
const byteRange = getByteRangeFromSpec(rangeSpec, objectSize);
if (expectedByteRange.error) {
@ -84,58 +83,58 @@ describe('getByteRangeFromSpec function', () => {
});
describe('parseRange function', () => {
it('should return an object with the start and end if range is '
test('should return an object with the start and end if range is '
+ 'valid', () => {
checkParseRange('bytes=0-9', 10, [0, 9]);
});
it('should set the end of the range at the total object length minus 1 ' +
test('should set the end of the range at the total object length minus 1 ' +
'if the provided end of range goes beyond the end of the object ' +
'length', () => {
checkParseRange('bytes=0-9', 8, [0, 7]);
});
it('should handle incomplete range specifier where only end offset is ' +
test('should handle incomplete range specifier where only end offset is ' +
'provided', () => {
checkParseRange('bytes=-500', 10000, [9500, 9999]);
});
it('should handle incomplete range specifier where only start ' +
test('should handle incomplete range specifier where only start ' +
'provided', () => {
checkParseRange('bytes=9500-', 10000, [9500, 9999]);
});
it('should return undefined for the range if the range header ' +
test('should return undefined for the range if the range header ' +
'format is invalid: missing equal', () => {
checkParseRange('bytes0-9', 10);
});
it('should return undefined for the range if the range header ' +
test('should return undefined for the range if the range header ' +
'format is invalid: missing dash', () => {
checkParseRange('bytes=09', 10);
});
it('should return undefined for the range if the range header ' +
test('should return undefined for the range if the range header ' +
'format is invalid: value invalid character', () => {
checkParseRange('bytes=%-4', 10);
});
it('should return undefined for the range if the range header ' +
test('should return undefined for the range if the range header ' +
'format is invalid: value not int', () => {
checkParseRange('bytes=4-a', 10);
});
it('should return undefined for the range if the range header ' +
test('should return undefined for the range if the range header ' +
'format is invalid: start > end', () => {
checkParseRange('bytes=5-4', 10);
});
it('should return undefined for the range if the range header ' +
test('should return undefined for the range if the range header ' +
'format is invalid: negative start bound', () => {
checkParseRange('bytes=-2-5', 10);
});
it('should return InvalidRange if the range of the resource ' +
test('should return InvalidRange if the range of the resource ' +
'does not cover the byte range', () => {
const rangeHeader = 'bytes=10-30';
const totalLength = 10;
@ -143,50 +142,49 @@ describe('parseRange function', () => {
assert.strictEqual(error.code, 416);
assert.strictEqual(range, undefined);
});
it('should return undefined for "bytes=-" request (invalid syntax) ',
() => {
test('should return undefined for "bytes=-" request (invalid syntax) ', () => {
checkParseRange('bytes=-', 10);
});
it('should return undefined for "bytes=-" request (invalid syntax, ' +
test('should return undefined for "bytes=-" request (invalid syntax, ' +
'empty object)', () => {
checkParseRange('bytes=-', 0);
});
it('should return undefined for "bytes=10-9" request (invalid syntax, ' +
test('should return undefined for "bytes=10-9" request (invalid syntax, ' +
'empty object)', () => {
checkParseRange('bytes=10-9', 0);
});
it('should return InvalidRange on 0-byte suffix range request', () => {
test('should return InvalidRange on 0-byte suffix range request', () => {
const rangeHeader = 'bytes=-0';
const { range, error } = parseRange(rangeHeader, 10);
assert.strictEqual(error.code, 416);
assert.strictEqual(range, undefined);
});
it('should return InvalidRange on 0-byte suffix range request ' +
test('should return InvalidRange on 0-byte suffix range request ' +
'(empty object)', () => {
const rangeHeader = 'bytes=-0';
const { range, error } = parseRange(rangeHeader, 0);
assert.strictEqual(error.code, 416);
assert.strictEqual(range, undefined);
});
it('should return undefined on suffix range request on empty ' +
test('should return undefined on suffix range request on empty ' +
'object', () => {
checkParseRange('bytes=-10', 0);
});
it('should return InvalidRange on empty object when only start==0 ' +
test('should return InvalidRange on empty object when only start==0 ' +
'provided', () => {
const rangeHeader = 'bytes=0-';
const { range, error } = parseRange(rangeHeader, 0);
assert.strictEqual(error.code, 416);
assert.strictEqual(range, undefined);
});
it('should return InvalidRange on empty object when only start!=0 ' +
test('should return InvalidRange on empty object when only start!=0 ' +
'provided', () => {
const rangeHeader = 'bytes=10-';
const { range, error } = parseRange(rangeHeader, 0);
assert.strictEqual(error.code, 416);
assert.strictEqual(range, undefined);
});
it('should return InvalidRange on empty object when start and end ' +
test('should return InvalidRange on empty object when start and end ' +
'are provided', () => {
const rangeHeader = 'bytes=10-30';
const { range, error } = parseRange(rangeHeader, 0);

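For the suffix-range cases above, the arithmetic the parser is expected to apply (per standard HTTP Range semantics) is simply:

    // 'bytes=-500' on a 10000-byte object selects the last 500 bytes
    const objectSize = 10000;
    const suffixLength = 500;
    const start = objectSize - suffixLength;  // 9500
    const end = objectSize - 1;               // 9999
    // hence checkParseRange('bytes=-500', 10000, [9500, 9999]) above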
View File

@ -53,17 +53,17 @@ describe('REST interface for blob data storage', () => {
});
}
before(done => {
beforeAll(done => {
setup(done);
});
after(done => {
afterAll(done => {
server.stop();
done();
});
describe('simple tests', () => {
it('should be able to PUT, GET and DELETE an object', done => {
test('should be able to PUT, GET and DELETE an object', done => {
const contents = 'This is the contents of the new object';
let objKey;
@ -133,7 +133,7 @@ describe('REST interface for blob data storage', () => {
`bytes ${expectedStart}-${expectedEnd}/${contents.length}`);
}
before(done => {
beforeAll(done => {
const rs = createReadStream(contents);
client.put(rs, contents.length, '1', (err, key) => {
assert.ifError(err);
@ -164,23 +164,22 @@ describe('REST interface for blob data storage', () => {
{ range: [contents.length - 1, undefined],
sliceArgs: [-1], contentRange: [contents.length - 1,
contents.length - 1] }]
.forEach((test, i) => {
const { range, sliceArgs, contentRange } = test;
it(`should get the correct range ${range[0]}-${range[1]}`,
done => {
client.get(
objKey, range,
(1000 + i).toString(), (err, resp) => {
assert.ifError(err);
const value = resp.read();
assert.strictEqual(
value.toString(),
contents.slice(...sliceArgs));
checkContentRange(resp, contentRange[0],
contentRange[1]);
done();
});
});
.forEach((t, i) => {
const { range, sliceArgs, contentRange } = t;
test(`should get the correct range ${range[0]}-${range[1]}`, done => {
client.get(
objKey, range,
(1000 + i).toString(), (err, resp) => {
assert.ifError(err);
const value = resp.read();
assert.strictEqual(
value.toString(),
contents.slice(...sliceArgs));
checkContentRange(resp, contentRange[0],
contentRange[1]);
done();
});
});
});
// queries returning 416 Requested Range Not Satisfiable
@ -190,23 +189,22 @@ describe('REST interface for blob data storage', () => {
{ range: [0, undefined], emptyObject: true },
{ range: [0, 10], emptyObject: true },
{ range: [undefined, 0], emptyObject: true }]
.forEach((test, i) => {
const { range, emptyObject } = test;
it(`should get error 416 on ${range[0]}-${range[1]}` +
`${emptyObject ? ' (empty object)' : ''}`,
done => {
const key = (emptyObject ? emptyObjKey : objKey);
client.get(
key, range,
(2000 + i).toString(), err => {
assert(err);
assert.strictEqual(err.code, 416);
done();
});
});
.forEach((t, i) => {
const { range, emptyObject } = t;
test(`should get error 416 on ${range[0]}-${range[1]}` +
`${emptyObject ? ' (empty object)' : ''}`, done => {
const key = (emptyObject ? emptyObjKey : objKey);
client.get(
key, range,
(2000 + i).toString(), err => {
assert(err);
assert.strictEqual(err.code, 416);
done();
});
});
});
it('should get 200 OK on both range boundaries undefined', done => {
test('should get 200 OK on both range boundaries undefined', done => {
client.get(objKey, [undefined, undefined], '3001', (err, resp) => {
assert.ifError(err);
const value = resp.read();
@ -214,14 +212,13 @@ describe('REST interface for blob data storage', () => {
done();
});
});
it('should get 200 OK on range query "bytes=-10" of empty object',
done => {
client.get(emptyObjKey, [undefined, 10], '3002', (err, resp) => {
assert.ifError(err);
const value = resp.read();
assert.strictEqual(value, null);
done();
});
});
test('should get 200 OK on range query "bytes=-10" of empty object', done => {
client.get(emptyObjKey, [undefined, 10], '3002', (err, resp) => {
assert.ifError(err);
const value = resp.read();
assert.strictEqual(value, null);
done();
});
});
});
});

View File

@ -161,7 +161,7 @@ describe('level-net - LevelDB over network', () => {
async.series(opList, cb);
}
before(done => {
beforeAll(done => {
temp.mkdir('level-net-testdb-', (err, dbDir) => {
const rootDb = level(dbDir);
db = sublevel(rootDb);
@ -169,7 +169,7 @@ describe('level-net - LevelDB over network', () => {
});
});
after(done => {
afterAll(done => {
client.once('disconnect', () => {
server.close();
done();
@ -178,17 +178,17 @@ describe('level-net - LevelDB over network', () => {
});
describe('simple tests', () => {
it('should be able to perform a complete CRUD test', done => {
test('should be able to perform a complete CRUD test', done => {
doCRUDTest(client, 'CRUD', 'testkey', done);
});
});
describe('sublevel tests', () => {
it('should be able to do CRUD on sublevel', done => {
test('should be able to do CRUD on sublevel', done => {
const subLevel = client.openSub('sub1');
assert(subLevel);
doCRUDTest(subLevel, 'CRUD', 'subkey', done);
});
it('should be able to re-open a sublevel', done => {
test('should be able to re-open a sublevel', done => {
const subLevel = client.openSub('sub2');
doCRUDTest(subLevel, 'C', 'subkey', err => {
assert.ifError(err);
@ -196,7 +196,7 @@ describe('level-net - LevelDB over network', () => {
doCRUDTest(subLevel2, 'RD', 'subkey', done);
});
});
it('should separate sublevel namespaces correctly', done => {
test('should separate sublevel namespaces correctly', done => {
const subLevel = client.openSub('sub3');
doCRUDTest(subLevel, 'C', 'subkey', err => {
assert.ifError(err);
@ -208,7 +208,7 @@ describe('level-net - LevelDB over network', () => {
});
});
});
it('should be able to nest multiple sub-levels', done => {
test('should be able to nest multiple sub-levels', done => {
const subLevel = client.openSub('sub4');
const nestedSub1 = subLevel.openSub('sub4-nested1');
const nestedSub2 = nestedSub1.openSub('nested-nested');
@ -249,10 +249,10 @@ describe('level-net - LevelDB over network', () => {
.put(keyOfIter(i), valueOfIter(i), params, putCb);
}
}
before(done => {
beforeAll(done => {
prefillKeys(done);
});
it('should be able to read keys back at random', done => {
test('should be able to read keys back at random', done => {
const nbGet = 100;
let nbGetDone = 0;
@ -272,7 +272,7 @@ describe('level-net - LevelDB over network', () => {
.get(keyOfIter(randI), params, getCb);
}
});
it('should be able to list all keys through a stream and ' +
test('should be able to list all keys through a stream and ' +
'rewrite them on-the-fly', done => {
client.createReadStream((err, keyStream) => {
assert.ifError(err);
@ -311,7 +311,7 @@ describe('level-net - LevelDB over network', () => {
});
});
});
it('should be able to abort key listing properly when client ' +
test('should be able to abort key listing properly when client ' +
'destroys the stream', done => {
client.createReadStream((err, keyStream) => {
assert.ifError(err);
@ -344,7 +344,7 @@ describe('level-net - LevelDB over network', () => {
});
});
});
it('should delete all keys successfully', done => {
test('should delete all keys successfully', done => {
let nbDelDone = 0;
function checkAllDeleted(done) {

View File

@ -53,11 +53,11 @@ describe('rpc - generic client/server RPC system', () => {
miscClient.connect(done);
}
before(done => {
beforeAll(done => {
setupRPC(done);
});
after(done => {
afterAll(done => {
miscClient.once('disconnect', () => {
server.close();
done();
@ -66,7 +66,7 @@ describe('rpc - generic client/server RPC system', () => {
});
describe('simple tests', () => {
it('should ping an RPC server (sync on server)', done => {
test('should ping an RPC server (sync on server)', done => {
miscClient.withRequestLogger(reqLogger).ping((err, args) => {
if (err) {
return done(err);
@ -75,7 +75,7 @@ describe('rpc - generic client/server RPC system', () => {
return done();
});
});
it('should ping an RPC server (async on server)', done => {
test('should ping an RPC server (async on server)', done => {
miscClient.withRequestLogger(reqLogger).pingAsync((err, args) => {
if (err) {
return done(err);
@ -87,7 +87,7 @@ describe('rpc - generic client/server RPC system', () => {
});
describe('error tests', () => {
it('should timeout if command is too long to respond', done => {
test('should timeout if command is too long to respond', done => {
// shorten the timeout to 200ms to speed up the test
const oldTimeout = miscClient.getCallTimeout();
miscClient.setCallTimeout(200);
@ -98,7 +98,7 @@ describe('rpc - generic client/server RPC system', () => {
});
miscClient.setCallTimeout(oldTimeout);
});
it('should throw if last argument of call is not a callback', done => {
test('should throw if last argument of call is not a callback', done => {
assert.throws(() => {
miscClient.withRequestLogger(reqLogger).pingAsync(
'not a callback');

View File

@ -48,7 +48,7 @@ beforeEach(() => {
});
describe('Policies validation - Invalid JSON', () => {
it('should return error for invalid JSON', () => {
test('should return error for invalid JSON', () => {
const result = validateUserPolicy('{"Version":"2012-10-17",' +
'"Statement":{"Effect":"Allow""Action":"s3:PutObject",' +
'"Resource":"arn:aws:s3*"}}');
@ -57,16 +57,16 @@ describe('Policies validation - Invalid JSON', () => {
});
describe('Policies validation - Version', () => {
it('should validate with version date 2012-10-17', () => {
test('should validate with version date 2012-10-17', () => {
check(policy, successRes);
});
it('should return error for other dates', () => {
test('should return error for other dates', () => {
policy.Version = '2012-11-17';
check(policy, failRes());
});
it('should return error if Version field is missing', () => {
test('should return error if Version field is missing', () => {
policy.Version = undefined;
check(policy, failRes(errDict.required.Version));
});
@ -144,15 +144,15 @@ describe('Policies validation - Principal', () => {
name: 'with backbeat service',
value: { Service: 'backbeat' },
},
].forEach(test => {
it(`should allow principal field with ${test.name}`, () => {
policy.Statement.Principal = test.value;
].forEach(t => {
test(`should allow principal field with ${t.name}`, () => {
policy.Statement.Principal = t.value;
delete policy.Statement.Resource;
check(policy, successRes);
});
it(`should allow notPrincipal field with ${test.name}`, () => {
policy.Statement.NotPrincipal = test.value;
test(`should allow notPrincipal field with ${t.name}`, () => {
policy.Statement.NotPrincipal = t.value;
delete policy.Statement.Resource;
check(policy, successRes);
});
@ -244,26 +244,26 @@ describe('Policies validation - Principal', () => {
name: 'with other service than backbeat',
value: { Service: 'non-existent-service' },
},
].forEach(test => {
it(`should fail with ${test.name}`, () => {
policy.Statement.Principal = test.value;
].forEach(t => {
test(`should fail with ${t.name}`, () => {
policy.Statement.Principal = t.value;
delete policy.Statement.Resource;
check(policy, failRes());
});
});
it('should not allow Resource field', () => {
test('should not allow Resource field', () => {
policy.Statement.Principal = '*';
check(policy, failRes());
});
});
describe('Policies validation - Statement', () => {
it('should succeed for a valid object', () => {
test('should succeed for a valid object', () => {
check(policy, successRes);
});
it('should succeed for a valid array', () => {
test('should succeed for a valid array', () => {
policy.Statement = [
{
Effect: 'Allow',
@ -279,43 +279,43 @@ describe('Policies validation - Statement', () => {
check(policy, successRes);
});
it('should return an error for undefined', () => {
test('should return an error for undefined', () => {
policy.Statement = undefined;
check(policy, failRes());
});
it('should return an error for an empty list', () => {
test('should return an error for an empty list', () => {
policy.Statement = [];
check(policy, failRes());
});
it('should return an error for an empty object', () => {
test('should return an error for an empty object', () => {
policy.Statement = {};
check(policy, failRes(errDict.required.Action));
});
it('should return an error for missing a required field - Action', () => {
test('should return an error for missing a required field - Action', () => {
delete policy.Statement.Action;
check(policy, failRes(errDict.required.Action));
});
it('should return an error for missing a required field - Effect', () => {
test('should return an error for missing a required field - Effect', () => {
delete policy.Statement.Effect;
check(policy, failRes());
});
it('should return an error for missing a required field - Resource', () => {
test('should return an error for missing a required field - Resource', () => {
delete policy.Statement.Resource;
check(policy, failRes());
});
it('should return an error for missing multiple required fields', () => {
test('should return an error for missing multiple required fields', () => {
delete policy.Statement.Effect;
delete policy.Statement.Resource;
check(policy, failRes());
});
it('should succeed with optional fields missing - Sid, Condition', () => {
test('should succeed with optional fields missing - Sid, Condition', () => {
delete policy.Statement.Sid;
delete policy.Statement.Condition;
check(policy, successRes);
@ -323,37 +323,37 @@ describe('Policies validation - Statement', () => {
});
describe('Policies validation - Statement::Sid_block', () => {
it('should succeed if Sid is any alphanumeric string', () => {
test('should succeed if Sid is any alphanumeric string', () => {
check(policy, successRes);
});
it('should fail if Sid is not a valid format', () => {
test('should fail if Sid is not a valid format', () => {
policy.Statement.Sid = 'foo bar()';
check(policy, failRes());
});
it('should fail if Sid is not a string', () => {
test('should fail if Sid is not a string', () => {
policy.Statement.Sid = 1234;
check(policy, failRes());
});
});
describe('Policies validation - Statement::Effect_block', () => {
it('should succeed for Allow', () => {
test('should succeed for Allow', () => {
check(policy, successRes);
});
it('should succeed for Deny', () => {
test('should succeed for Deny', () => {
policy.Statement.Effect = 'Deny';
check(policy, successRes);
});
it('should fail for strings other than Allow/Deny', () => {
test('should fail for strings other than Allow/Deny', () => {
policy.Statement.Effect = 'Reject';
check(policy, failRes());
});
it('should fail if Effect is not a string', () => {
test('should fail if Effect is not a string', () => {
policy.Statement.Effect = 1;
check(policy, failRes());
});
@ -366,7 +366,7 @@ describe('Policies validation - Statement::Action_block/' +
policy.Statement.NotAction = undefined;
});
it('should succeed for foo:bar', () => {
test('should succeed for foo:bar', () => {
policy.Statement.Action = 'foo:bar';
check(policy, successRes);
@ -375,7 +375,7 @@ describe('Policies validation - Statement::Action_block/' +
check(policy, successRes);
});
it('should succeed for foo:*', () => {
test('should succeed for foo:*', () => {
policy.Statement.Action = 'foo:*';
check(policy, successRes);
@ -384,7 +384,7 @@ describe('Policies validation - Statement::Action_block/' +
check(policy, successRes);
});
it('should succeed for *', () => {
test('should succeed for *', () => {
policy.Statement.Action = '*';
check(policy, successRes);
@ -393,7 +393,7 @@ describe('Policies validation - Statement::Action_block/' +
check(policy, successRes);
});
it('should fail for **', () => {
test('should fail for **', () => {
policy.Statement.Action = '**';
check(policy, failRes(errDict.pattern.Action));
@ -402,7 +402,7 @@ describe('Policies validation - Statement::Action_block/' +
check(policy, failRes(errDict.pattern.Action));
});
it('should fail for foobar', () => {
test('should fail for foobar', () => {
policy.Statement.Action = 'foobar';
check(policy, failRes(errDict.pattern.Action));
@ -419,7 +419,7 @@ describe('Policies validation - Statement::Resource_block' +
policy.Statement.NotResource = undefined;
});
it('should succeed for arn:aws:s3:::*', () => {
test('should succeed for arn:aws:s3:::*', () => {
policy.Statement.Resource = 'arn:aws:s3:::*';
check(policy, successRes);
@ -428,7 +428,7 @@ describe('Policies validation - Statement::Resource_block' +
check(policy, successRes);
});
it('should succeed for arn:aws:s3:::test/home/${aws:username}', () => {
test('should succeed for arn:aws:s3:::test/home/${aws:username}', () => {
policy.Statement.Resource = 'arn:aws:s3:::test/home/${aws:username}';
check(policy, successRes);
@ -437,7 +437,7 @@ describe('Policies validation - Statement::Resource_block' +
check(policy, successRes);
});
it('should succeed for arn:aws:ec2:us-west-1:1234567890:vol/*', () => {
test('should succeed for arn:aws:ec2:us-west-1:1234567890:vol/*', () => {
policy.Statement.Resource = 'arn:aws:ec2:us-west-1:1234567890:vol/*';
check(policy, successRes);
@ -446,7 +446,7 @@ describe('Policies validation - Statement::Resource_block' +
check(policy, successRes);
});
it('should succeed for *', () => {
test('should succeed for *', () => {
policy.Statement.Resource = '*';
check(policy, successRes);
@ -455,7 +455,7 @@ describe('Policies validation - Statement::Resource_block' +
check(policy, successRes);
});
it('should fail for arn:aws:ec2:us-west-1:vol/* - missing region', () => {
test('should fail for arn:aws:ec2:us-west-1:vol/* - missing region', () => {
policy.Statement.Resource = 'arn:aws:ec2:1234567890:vol/*';
check(policy, failRes(errDict.pattern.Resource));
@ -464,7 +464,7 @@ describe('Policies validation - Statement::Resource_block' +
check(policy, failRes(errDict.pattern.Resource));
});
it('should fail for arn:aws:ec2:us-west-1:123456789:v/${} - ${}', () => {
test('should fail for arn:aws:ec2:us-west-1:123456789:v/${} - ${}', () => {
policy.Statement.Resource = 'arn:aws:ec2:us-west-1:123456789:v/${}';
check(policy, failRes(errDict.pattern.Resource));
@ -473,7 +473,7 @@ describe('Policies validation - Statement::Resource_block' +
check(policy, failRes(errDict.pattern.Resource));
});
it('should fail for ec2:us-west-1:qwerty:vol/* - missing arn:aws:', () => {
test('should fail for ec2:us-west-1:qwerty:vol/* - missing arn:aws:', () => {
policy.Statement.Resource = 'ec2:us-west-1:123456789012:vol/*';
check(policy, failRes(errDict.pattern.Resource));
@ -482,18 +482,18 @@ describe('Policies validation - Statement::Resource_block' +
check(policy, failRes(errDict.pattern.Resource));
});
it('should fail for empty list of resources', () => {
test('should fail for empty list of resources', () => {
policy.Statement.Resource = [];
check(policy, failRes(errDict.minItems.Resource));
});
});
describe('Policies validation - Statement::Condition_block', () => {
it('should succeed for single Condition', () => {
test('should succeed for single Condition', () => {
check(policy, successRes);
});
it('should succeed for multiple Conditions', () => {
test('should succeed for multiple Conditions', () => {
policy.Statement.Condition = {
StringNotLike: { 's3:prefix': ['Development/*'] },
Null: { 's3:prefix': false },
@ -501,19 +501,19 @@ describe('Policies validation - Statement::Condition_block', () => {
check(policy, successRes);
});
it('should fail when Condition is not an Object', () => {
test('should fail when Condition is not an Object', () => {
policy.Statement.Condition = 'NumericLessThanEquals';
check(policy, failRes());
});
it('should fail for an invalid Condition', () => {
test('should fail for an invalid Condition', () => {
policy.Statement.Condition = {
SomethingLike: { 's3:prefix': ['Development/*'] },
};
check(policy, failRes());
});
it('should fail when one of the multiple conditions is invalid', () => {
test('should fail when one of the multiple conditions is invalid', () => {
policy.Statement.Condition = {
Null: { 's3:prefix': false },
SomethingLike: { 's3:prefix': ['Development/*'] },
@ -521,7 +521,7 @@ describe('Policies validation - Statement::Condition_block', () => {
check(policy, failRes());
});
it('should fail when invalid property is assigned', () => {
test('should fail when invalid property is assigned', () => {
policy.Condition = {
SomethingLike: { 's3:prefix': ['Development/*'] },
};

View File

@ -289,10 +289,10 @@ describe('Principal evaluator', () => {
},
result: 'Deny',
},
].forEach(test => {
it(`_evaluatePrincipalField(): ${test.name}`, () => {
].forEach(t => {
test(`_evaluatePrincipalField(): ${t.name}`, () => {
assert.strictEqual(Principal._evaluatePrincipalField(defaultParams,
test.statement, test.valids), test.result);
t.statement, t.valids), t.result);
});
});
@ -348,17 +348,17 @@ describe('Principal evaluator', () => {
valids: defaultValids,
result: 'Deny',
},
].forEach(test => {
it(`_evaluatePrincipal(): ${test.name}`, () => {
].forEach(t => {
test(`_evaluatePrincipal(): ${t.name}`, () => {
const params = {
log: defaultParams.log,
trustedPolicy: {
Statement: test.statement,
Statement: t.statement,
},
};
const valids = test.valids;
const valids = t.valids;
assert.strictEqual(Principal._evaluatePrincipal(params, valids),
test.result);
t.result);
});
});
@ -621,27 +621,27 @@ describe('Principal evaluator', () => {
checkAction: true,
},
},
].forEach(test => {
it(`evaluatePrincipal(): ${test.name}`, () => {
].forEach(t => {
test(`evaluatePrincipal(): ${t.name}`, () => {
const rc = new RequestContext({}, {}, '', '', '127.0.0.1',
false, 'assumeRole', 'sts', null, {
accountid: test.requester.accountId,
arn: test.requester.arn,
parentArn: test.requester.parentArn,
principalType: test.requester.userType,
accountid: t.requester.accountId,
arn: t.requester.arn,
parentArn: t.requester.parentArn,
principalType: t.requester.userType,
externalId: '4321',
}, 'v4', 'V4');
const params = {
log: defaultParams.log,
trustedPolicy: {
Statement: test.statement,
Statement: t.statement,
},
rc,
targetAccountId: test.target.accountId,
targetAccountId: t.target.accountId,
};
const result = Principal.evaluatePrincipal(params);
assert.deepStrictEqual(result, test.result);
assert.deepStrictEqual(result, t.result);
});
});
});

File diff suppressed because it is too large

View File

@ -19,15 +19,15 @@ function testMD5(payload, expectedMD5, done) {
}
describe('utilities.MD5Sum', () => {
it('should work on empty request', done => {
test('should work on empty request', done => {
testMD5('', constants.emptyFileMd5, done);
});
it('should work on SAY GRRRR!!! request', done => {
test('should work on SAY GRRRR!!! request', done => {
testMD5('SAY GRRRR!!!', '986eb4a201192e8b1723a42c1468fb4e', done);
});
it('should work on multiple MiB data stream', done => {
test('should work on multiple MiB data stream', done => {
/*
* relies on a zero filled buffer and
* split content in order to get multiple calls of _transform()

View File

@ -38,7 +38,7 @@ describe('s3middleware SubStreamInterface.stopStreaming()', () => {
subStreamInterface._currentStream.on('finish', () => {
eventsEmitted.currentStreamEnded = eventSequence++;
});
it('should stop streaming data and end current stream', done => {
test('should stop streaming data and end current stream', done => {
sourceStream.on('data', chunk => {
const currentLength = subStreamInterface.getLengthCounter();
if (currentLength === 10) {

View File

@ -49,19 +49,19 @@ const subPartInfoTests = [
];
describe('s3middleware Azure MPU helper utility function', () => {
padStringTests.forEach(test => {
it(`padString should pad a ${test.category}`, done => {
const result = test.strings.map(str =>
padString(str, test.category));
assert.deepStrictEqual(result, test.expectedResults);
padStringTests.forEach(t => {
test(`padString should pad a ${t.category}`, done => {
const result = t.strings.map(str =>
padString(str, t.category));
assert.deepStrictEqual(result, t.expectedResults);
done();
});
});
subPartInfoTests.forEach(test => {
subPartInfoTests.forEach(t => {
const { desc, size, expectedNumberSubParts, expectedLastPartSize }
= test;
it('getSubPartInfo should return correct result for ' +
= t;
test('getSubPartInfo should return correct result for ' +
`dataContentLength of ${desc}`, done => {
const result = getSubPartInfo(size);
const expectedLastPartIndex = expectedNumberSubParts - 1;

View File

@ -22,10 +22,10 @@ describe('s3middleware.NullStream', () => {
for (let i = 0; i < nullChunks.length; ++i) {
const size = nullChunks[i].size;
const md5sum = nullChunks[i].md5sum;
it(`should generate ${size} null bytes by size`, done => {
test(`should generate ${size} null bytes by size`, done => {
testNullChunk(size, null, md5sum, done);
});
it(`should generate ${size} null bytes by range`, done => {
test(`should generate ${size} null bytes by range`, done => {
const dummyOffset = 9320954;
testNullChunk(0, [dummyOffset, dummyOffset + size - 1],
md5sum, done);

View File

@ -8,14 +8,14 @@ const hexHash = 'd41d8cd98f00b204e9800998ecf8427e';
const base64Hash = '1B2M2Y8AsgTpgAmY7PhCfg==';
describe('s3middleware object utilities', () => {
it('should convert hexadecimal MD5 to base 64', done => {
test('should convert hexadecimal MD5 to base 64', done => {
const hash = crypto.createHash('md5').digest('hex');
const convertedHash = objectUtils.getBase64MD5(hash);
assert.strictEqual(convertedHash, base64Hash);
done();
});
it('should convert base 64 MD5 to hexadecimal', done => {
test('should convert base 64 MD5 to hexadecimal', done => {
const hash = crypto.createHash('md5').digest('base64');
const convertedHash = objectUtils.getHexMD5(hash);
assert.strictEqual(convertedHash, hexHash);

View File

@ -117,7 +117,7 @@ function _addCases(dominantHeader, recessiveHeader) {
_addCases('if-none-match', 'if-modified-since');
function checkSuccess(h) {
it('should succeed when value meets condition', () => {
test('should succeed when value meets condition', () => {
const headers = {};
headers[h] = basicTestCases[h].success;
const result =
@ -127,7 +127,7 @@ function checkSuccess(h) {
}
function checkFailure(h) {
it('should fail when value does not meet condition', () => {
test('should fail when value does not meet condition', () => {
const headers = {};
headers[h] = basicTestCases[h].fail;
const result =
@ -140,7 +140,7 @@ function checkFailure(h) {
function checkCaseResult(testCase) {
const h = Object.keys(testCase.headers);
it(`"${h[0]}" ${testCase.headers[h[0]]} and "${h[1]}" ` +
test(`"${h[0]}" ${testCase.headers[h[0]]} and "${h[1]}" ` +
`${testCase.headers[h[1]]}: should return ${testCase.result}`, () => {
const testHeaders = {};
h.forEach(key => {
@ -179,25 +179,26 @@ describe('_checkEtagMatch function :', () => {
};
const listOfValues = eTagMatchValues.map(item => item.value).join();
eTagMatchValues.forEach(item => {
it(`should return success for ${item.desc}`, () => {
test(`should return success for ${item.desc}`, () => {
const result = _checkEtagMatch(item.value, contentMD5);
assert.deepStrictEqual(result, expectedSuccess);
});
});
it('should return success for multiple valid values', () => {
test('should return success for multiple valid values', () => {
const result = _checkEtagMatch(listOfValues, contentMD5);
assert.deepStrictEqual(result, expectedSuccess);
});
it('should return success for multiple valid values with comma at index 0',
() => {
const result = _checkEtagMatch(`,${listOfValues}`, contentMD5);
assert.deepStrictEqual(result, expectedSuccess);
});
test(
'should return success for multiple valid values with comma at index 0',
() => {
const result = _checkEtagMatch(`,${listOfValues}`, contentMD5);
assert.deepStrictEqual(result, expectedSuccess);
}
);
it('should return success as long as one value in list is valid',
() => {
test('should return success as long as one value in list is valid', () => {
const result = _checkEtagMatch(`${listOfValues},aaa`, contentMD5);
assert.deepStrictEqual(result, expectedSuccess);
});
@ -206,9 +207,9 @@ describe('_checkEtagMatch function :', () => {
{ desc: 'if only value does not match', value: 'aaa' },
{ desc: 'for list of non-matching values', value: 'aaa,bbb,ccc' },
];
failTests.forEach(test => {
it(`should return PreconditionFailed ${test.desc}`, () => {
const result = _checkEtagMatch(test.value, contentMD5);
failTests.forEach(t => {
test(`should return PreconditionFailed ${t.desc}`, () => {
const result = _checkEtagMatch(t.value, contentMD5);
assert.deepStrictEqual(result.error, errors.PreconditionFailed);
});
});
@ -216,26 +217,25 @@ describe('_checkEtagMatch function :', () => {
describe('_checkEtagNoneMatch function :', () => {
eTagMatchValues.forEach(item => {
it(`should return NotModified for ${item.desc}`, () => {
test(`should return NotModified for ${item.desc}`, () => {
const result = _checkEtagNoneMatch(item.value, contentMD5);
assert.deepStrictEqual(result.error, errors.NotModified);
});
it(`should return NotModified if ${item.desc} is in a list of ` +
'otherwise non-matching values',
() => {
test(`should return NotModified if ${item.desc} is in a list of ` +
'otherwise non-matching values', () => {
const result = _checkEtagNoneMatch(`aaa,${item.value},bbb`,
contentMD5);
assert.deepStrictEqual(result.error, errors.NotModified);
});
});
it('should return success for multiple non-matching values', () => {
test('should return success for multiple non-matching values', () => {
const result = _checkEtagNoneMatch('aaa,bbb,ccc', contentMD5);
assert.deepStrictEqual(result, expectedSuccess);
});
it('should return success for multiple non-matching values ' +
test('should return success for multiple non-matching values ' +
'with comma at index 0', () => {
const result = _checkEtagNoneMatch(',aaa,bbb,ccc', contentMD5);
assert.deepStrictEqual(result, expectedSuccess);
@ -243,50 +243,57 @@ describe('_checkEtagNoneMatch function :', () => {
});
describe('_checkModifiedSince function :', () => {
it('should return InvalidArgument if header has invalid value', () => {
test('should return InvalidArgument if header has invalid value', () => {
const result = _checkModifiedSince('aaaa', lastModified);
assert.deepStrictEqual(result.error, errors.InvalidArgument);
});
it('should return success if header value is earlier than last modified',
() => {
const result = _checkModifiedSince(beforeLastModified, lastModified);
assert.deepStrictEqual(result, expectedSuccess);
});
test(
'should return success if header value is earlier than last modified',
() => {
const result = _checkModifiedSince(beforeLastModified, lastModified);
assert.deepStrictEqual(result, expectedSuccess);
}
);
it('should return NotModified if header value is later than last modified',
() => {
const result = _checkModifiedSince(afterLastModified, lastModified);
assert.deepStrictEqual(result.error, errors.NotModified);
});
test(
'should return NotModified if header value is later than last modified',
() => {
const result = _checkModifiedSince(afterLastModified, lastModified);
assert.deepStrictEqual(result.error, errors.NotModified);
}
);
it('should return NotModified if header value is equal to last modified',
() => {
const result = _checkModifiedSince(lastModified, lastModified);
assert.deepStrictEqual(result.error, errors.NotModified);
});
test(
'should return NotModified if header value is equal to last modified',
() => {
const result = _checkModifiedSince(lastModified, lastModified);
assert.deepStrictEqual(result.error, errors.NotModified);
}
);
});
describe('_checkUnmodifiedSince function :', () => {
it('should return InvalidArgument if header has invalid value', () => {
test('should return InvalidArgument if header has invalid value', () => {
const result = _checkUnmodifiedSince('aaaa', lastModified);
assert.deepStrictEqual(result.error, errors.InvalidArgument);
});
it('should return PreconditionFailed if header value is earlier than ' +
test('should return PreconditionFailed if header value is earlier than ' +
'last modified', () => {
const result = _checkUnmodifiedSince(beforeLastModified, lastModified);
assert.deepStrictEqual(result.error, errors.PreconditionFailed);
});
it('should return success if header value is later than last modified',
() => {
const result = _checkUnmodifiedSince(afterLastModified, lastModified);
assert.deepStrictEqual(result, expectedSuccess);
});
test(
'should return success if header value is later than last modified',
() => {
const result = _checkUnmodifiedSince(afterLastModified, lastModified);
assert.deepStrictEqual(result, expectedSuccess);
}
);
it('should return success if header value is equal to last modified',
() => {
test('should return success if header value is equal to last modified', () => {
const result = _checkUnmodifiedSince(lastModified, lastModified);
assert.deepStrictEqual(result, expectedSuccess);
});

View File

@ -24,7 +24,7 @@ const validHosts = [
];
describe('routesUtils.getBucketNameFromHost', () => {
it('should extract valid buckets for one endpoint', () => {
test('should extract valid buckets for one endpoint', () => {
[
'b', 'mybucket',
'buck-et', '-buck-et', 'buck-et-',
@ -40,7 +40,7 @@ describe('routesUtils.getBucketNameFromHost', () => {
});
});
it('should also accept website endpoints', () => {
test('should also accept website endpoints', () => {
[
'in-french.bucket.is-seau.s3-website-eu-west-1.amazonaws.com',
'in-french.bucket.is-seau.s3-website-us-east-1.amazonaws.com',
@ -55,7 +55,7 @@ describe('routesUtils.getBucketNameFromHost', () => {
});
});
it('should return undefined when non dns-style', () => {
test('should return undefined when non dns-style', () => {
[
's3.amazonaws.com',
].forEach(host => {
@ -66,7 +66,7 @@ describe('routesUtils.getBucketNameFromHost', () => {
});
});
it('should return undefined when IP addresses', () => {
test('should return undefined when IP addresses', () => {
[
'127.0.0.1',
'8.8.8.8',
@ -82,7 +82,7 @@ describe('routesUtils.getBucketNameFromHost', () => {
});
});
it('should throw when bad request', () => {
test('should throw when bad request', () => {
[
{},
{ host: '' },

View File

@ -5,13 +5,13 @@ const bannedStr = 'banned';
const prefixBlacklist = [];
describe('routesUtils.isValidBucketName', () => {
it('should return false if bucketname is fewer than ' +
test('should return false if bucketname is fewer than ' +
'3 characters long', () => {
const result = routesUtils.isValidBucketName('no', prefixBlacklist);
assert.strictEqual(result, false);
});
it('should return false if bucketname is greater than ' +
test('should return false if bucketname is greater than ' +
'63 characters long', () => {
const longString = 'aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa' +
'aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa';
@ -20,39 +20,39 @@ describe('routesUtils.isValidBucketName', () => {
assert.strictEqual(result, false);
});
it('should return false if bucketname contains capital letters ' +
test('should return false if bucketname contains capital letters ' +
'and is not whitelisted', () => {
const result =
routesUtils.isValidBucketName('noSHOUTING', prefixBlacklist);
assert.strictEqual(result, false);
});
it('should return true if bucketname contains capital letters ' +
test('should return true if bucketname contains capital letters ' +
'but is whitelisted', () => {
const result =
routesUtils.isValidBucketName('METADATA', prefixBlacklist);
assert.strictEqual(result, true);
});
it('should return false if bucketname starts w/ blacklisted prefix', () => {
test('should return false if bucketname starts w/ blacklisted prefix', () => {
const result =
routesUtils.isValidBucketName('bannedbucket', [bannedStr]);
assert.strictEqual(result, false);
});
it('should return false if bucketname is an IP address', () => {
test('should return false if bucketname is an IP address', () => {
const result =
routesUtils.isValidBucketName('172.16.254.1', prefixBlacklist);
assert.strictEqual(result, false);
});
it('should return false if bucketname is not DNS compatible', () => {
test('should return false if bucketname is not DNS compatible', () => {
const result =
routesUtils.isValidBucketName('*notvalid*', prefixBlacklist);
assert.strictEqual(result, false);
});
it('should return true if bucketname does not break rules', () => {
test('should return true if bucketname does not break rules', () => {
const result = routesUtils.isValidBucketName('okay', prefixBlacklist);
assert.strictEqual(result, true);
});

View File

@ -13,7 +13,7 @@ const validHosts = [
];
describe('routesUtils.normalizeRequest', () => {
it('should parse bucket name from path', () => {
test('should parse bucket name from path', () => {
const request = {
url: `/${bucketName}`,
headers: { host: 's3.amazonaws.com' },
@ -23,7 +23,7 @@ describe('routesUtils.normalizeRequest', () => {
assert.strictEqual(result.parsedHost, 's3.amazonaws.com');
});
it('should parse bucket name from path when no slash', () => {
test('should parse bucket name from path when no slash', () => {
const request = {
url: `${bucketName}`,
headers: { host: 's3.amazonaws.com' },
@ -33,7 +33,7 @@ describe('routesUtils.normalizeRequest', () => {
assert.strictEqual(result.parsedHost, 's3.amazonaws.com');
});
it('should parse bucket name from host', () => {
test('should parse bucket name from host', () => {
const request = {
url: '/',
headers: { host: `${bucketName}.s3.amazonaws.com` },
@ -43,7 +43,7 @@ describe('routesUtils.normalizeRequest', () => {
assert.strictEqual(result.parsedHost, 's3.amazonaws.com');
});
it('should parse bucket and object name from path', () => {
test('should parse bucket and object name from path', () => {
const request = {
url: `/${bucketName}/${objName}`,
headers: { host: 's3.amazonaws.com' },
@ -54,7 +54,7 @@ describe('routesUtils.normalizeRequest', () => {
assert.strictEqual(result.parsedHost, 's3.amazonaws.com');
});
it('should parse bucket and object name from path with IP address', () => {
test('should parse bucket and object name from path with IP address', () => {
const request = {
url: `/${bucketName}/${objName}`,
headers: { host: '[::1]' },
@ -65,7 +65,7 @@ describe('routesUtils.normalizeRequest', () => {
assert.strictEqual(result.parsedHost, '[::1]');
});
it('should parse bucket name from host ' +
test('should parse bucket name from host ' +
'and object name from path', () => {
const request = {
url: `/${objName}`,

View File

@ -5,7 +5,7 @@ const assert = require('assert');
const shuffle = require('../../index').shuffle;
describe('Shuffle', () => {
it('should fail less than 0.005% times', done => {
test('should fail less than 0.005% times', done => {
let array = [];
const reference = [1, 2, 3, 4, 5, 6, 7, 8, 9, 0];
let fails = 0;

View File

@ -117,16 +117,16 @@ describe('raft record log client', () => {
done();
}
before(done => {
beforeAll(done => {
setup(done);
});
after(done => {
afterAll(done => {
done();
});
describe('readRecords', () => {
it('should list all records in a log', done => {
test('should list all records in a log', done => {
let nbRecords = 0;
logClient.readRecords({}, (err, info) => {
const recordStream = info.log;
@ -160,7 +160,7 @@ describe('raft record log client', () => {
});
});
describe('error cases', () => {
it('should handle 404 error gracefully', done => {
test('should handle 404 error gracefully', done => {
const logClient = new LogConsumer({ bucketClient,
raftSession: 1 });
logClient.readRecords({}, (err, info) => {
@ -170,7 +170,7 @@ describe('raft record log client', () => {
done();
});
});
it('should handle 416 error gracefully', done => {
test('should handle 416 error gracefully', done => {
const logClient = new LogConsumer({ bucketClient,
raftSession: 2 });
logClient.readRecords({}, (err, info) => {
@ -180,7 +180,7 @@ describe('raft record log client', () => {
done();
});
});
it('should handle other errors correctly', done => {
test('should handle other errors correctly', done => {
const logClient = new LogConsumer({ bucketClient,
raftSession: 3 });
logClient.readRecords({}, err => {
@ -189,7 +189,7 @@ describe('raft record log client', () => {
done();
});
});
it('should return error with malformed log response', done => {
test('should return error with malformed log response', done => {
const logClient = new LogConsumer({ bucketClient,
raftSession: 4 });
logClient.readRecords({}, err => {
@ -198,7 +198,7 @@ describe('raft record log client', () => {
done();
});
});
it('should emit error event if a log entry is malformed', done => {
test('should emit error event if a log entry is malformed', done => {
const logClient = new LogConsumer({ bucketClient,
raftSession: 5 });
logClient.readRecords({}, (err, res) => {

View File

@ -66,7 +66,7 @@ describe('record log - persistent log of metadata operations', () => {
done();
}
before(done => {
beforeAll(done => {
temp.mkdir('record-log-testdir-', (err, dbDir) => {
const rootDb = level(dbDir);
db = sublevel(rootDb);
@ -74,7 +74,7 @@ describe('record log - persistent log of metadata operations', () => {
});
});
after(done => {
afterAll(done => {
server.close();
done();
});
@ -97,7 +97,7 @@ describe('record log - persistent log of metadata operations', () => {
}
});
it('should list an empty record log', done => {
test('should list an empty record log', done => {
logProxy.readRecords({}, (err, res) => {
assert.ifError(err);
const info = res.info;
@ -113,7 +113,7 @@ describe('record log - persistent log of metadata operations', () => {
recordStream.on('end', done);
});
});
it('should be able to add records and list them thereafter', done => {
test('should be able to add records and list them thereafter', done => {
debug('going to append records');
const ops = [{ type: 'put', key: 'foo', value: 'bar',
prefix: ['foobucket'] },
@ -189,7 +189,7 @@ describe('record log - persistent log of metadata operations', () => {
describe('readRecords', () => {
let logProxy;
before(done => {
beforeAll(done => {
logProxy = createScratchRecordLog(cliLogger, err => {
assert.ifError(err);
// fill the log with 1000 entries
@ -238,28 +238,28 @@ describe('record log - persistent log of metadata operations', () => {
done();
});
}
it('should list all entries', done => {
test('should list all entries', done => {
logProxy.readRecords({}, (err, res) => {
assert.ifError(err);
checkReadRecords(res, { startSeq: 1, endSeq: 1000 }, done);
});
});
it('should list all entries from a given startSeq', done => {
test('should list all entries from a given startSeq', done => {
logProxy.readRecords({ startSeq: 500 }, (err, res) => {
assert.ifError(err);
checkReadRecords(res, { startSeq: 500, endSeq: 1000 }, done);
});
});
it('should list all entries up to a given endSeq', done => {
test('should list all entries up to a given endSeq', done => {
logProxy.readRecords({ endSeq: 500 }, (err, res) => {
assert.ifError(err);
checkReadRecords(res, { startSeq: 1, endSeq: 500 }, done);
});
});
it('should list all entries in a seq range', done => {
test('should list all entries in a seq range', done => {
logProxy.readRecords(
{ startSeq: 100, endSeq: 500 }, (err, res) => {
assert.ifError(err);
@ -268,8 +268,7 @@ describe('record log - persistent log of metadata operations', () => {
});
});
it('should list all entries from a given startSeq up to a limit',
done => {
test('should list all entries from a given startSeq up to a limit', done => {
logProxy.readRecords(
{ startSeq: 100, limit: 100 }, (err, res) => {
assert.ifError(err);

View File

@ -25,15 +25,15 @@ function check(array) {
}
describe('StringHash', () => {
it('Should compute a string hash', done => {
test('Should compute a string hash', done => {
const hash1 = stringHash('Hello!');
const hash2 = stringHash('Hello?');
assert.notDeepStrictEqual(hash1, hash2);
done();
});
it(`Should distribute uniformly with a maximum of ${ERROR}% of deviation`,
function f(done) {
this.timeout(20000);
test(
`Should distribute uniformly with a maximum of ${ERROR}% of deviation`,
done => {
const strings = new Array(STRING_COUNT).fill('')
.map(() => randomString(10));
const arr = new Array(ARRAY_LENGTH).fill(0);
@ -42,5 +42,6 @@ describe('StringHash', () => {
++arr[ind];
});
done(check(arr));
});
}
);
});

View File

@ -17,7 +17,7 @@ describe('test generating versionIds', () => {
}
process.env.VID_CRYPTO_PASSWORD = randkey(64);
it('sorted in reversed chronological and alphabetical order', () => {
test('sorted in reversed chronological and alphabetical order', () => {
for (let i = 0; i < count; i++) {
if (i !== 0) {
assert(vids[i - 1] > vids[i],
@ -26,13 +26,16 @@ describe('test generating versionIds', () => {
}
});
it('should return error decoding non-hex string versionIds', () => {
test('should return error decoding non-hex string versionIds', () => {
const encoded = vids.map(vid => VID.encode(vid));
const decoded = encoded.map(vid => VID.decode(`${vid}foo`));
decoded.forEach(result => assert(result instanceof Error));
decoded.forEach(result => {
console.log(result);
assert(result instanceof Error);
});
});
it('should encode and decode versionIds', () => {
test('should encode and decode versionIds', () => {
const encoded = vids.map(vid => VID.encode(vid));
const decoded = encoded.map(vid => VID.decode(vid));
assert.strictEqual(vids.length, count);

View File

@ -93,12 +93,12 @@ function batch(callback) {
describe('test VSP', () => {
afterEach(() => _cleanupKeyValueStore());
it('should run a batch of operations correctly', done => {
test('should run a batch of operations correctly', done => {
async.times(THREADS,
(i, next) => setTimeout(() => batch(next), i), done);
});
it('should be able to repair a PHD master version', done => {
test('should be able to repair a PHD master version', done => {
const putRequest = {
db: 'foo',
key: 'bar',
@ -150,7 +150,7 @@ describe('test VSP', () => {
}),
], done);
});
it('should allow to write a specific version + update master', done => {
test('should allow to write a specific version + update master', done => {
let v1;
let v2;