Compare commits

5 Commits: developmen ... feature/mi

Author | SHA1 | Date
---|---|---
Rahul Padigela | d3cbff8207 |
Rahul Padigela | fef2e25f31 |
Rahul Padigela | 2fa4568c78 |
Rahul Padigela | b6e823f006 |
Rahul Padigela | e563c9fb8a |
jest.config.js (new file)

@@ -0,0 +1,186 @@
// For a detailed explanation regarding each configuration property, visit:
// https://jestjs.io/docs/en/configuration.html

module.exports = {
  // All imported modules in your tests should be mocked automatically
  // automock: false,

  // Stop running tests after `n` failures
  // bail: 0,

  // Respect "browser" field in package.json when resolving modules
  // browser: false,

  // The directory where Jest should store its cached dependency information
  // cacheDirectory: "/private/var/folders/3k/pc7s4l115n984kmtypkgfyf40000gn/T/jest_dx",

  // Automatically clear mock calls and instances between every test
  clearMocks: true,

  // Indicates whether the coverage information should be collected while executing the test
  // collectCoverage: false,

  // An array of glob patterns indicating a set of files for which coverage information should be collected
  // collectCoverageFrom: null,

  // The directory where Jest should output its coverage files
  coverageDirectory: "coverage",

  // An array of regexp pattern strings used to skip coverage collection
  // coveragePathIgnorePatterns: [
  //   "/node_modules/"
  // ],

  // A list of reporter names that Jest uses when writing coverage reports
  // coverageReporters: [
  //   "json",
  //   "text",
  //   "lcov",
  //   "clover"
  // ],

  // An object that configures minimum threshold enforcement for coverage results
  // coverageThreshold: null,

  // A path to a custom dependency extractor
  // dependencyExtractor: null,

  // Make calling deprecated APIs throw helpful error messages
  // errorOnDeprecated: false,

  // Force coverage collection from ignored files using an array of glob patterns
  // forceCoverageMatch: [],

  // A path to a module which exports an async function that is triggered once before all test suites
  // globalSetup: null,

  // A path to a module which exports an async function that is triggered once after all test suites
  // globalTeardown: null,

  // A set of global variables that need to be available in all test environments
  // globals: {},

  // An array of directory names to be searched recursively up from the requiring module's location
  // moduleDirectories: [
  //   "node_modules"
  // ],

  // An array of file extensions your modules use
  // moduleFileExtensions: [
  //   "js",
  //   "json",
  //   "jsx",
  //   "ts",
  //   "tsx",
  //   "node"
  // ],

  // A map from regular expressions to module names that allow to stub out resources with a single module
  // moduleNameMapper: {},

  // An array of regexp pattern strings, matched against all module paths before considered 'visible' to the module loader
  // modulePathIgnorePatterns: [],

  // Activates notifications for test results
  // notify: false,

  // An enum that specifies notification mode. Requires { notify: true }
  // notifyMode: "failure-change",

  // A preset that is used as a base for Jest's configuration
  // preset: null,

  // Run tests from one or more projects
  // projects: null,

  // Use this configuration option to add custom reporters to Jest
  // reporters: undefined,

  // Automatically reset mock state between every test
  // resetMocks: false,

  // Reset the module registry before running each individual test
  // resetModules: false,

  // A path to a custom resolver
  // resolver: null,

  // Automatically restore mock state between every test
  // restoreMocks: false,

  // The root directory that Jest should scan for tests and modules within
  // rootDir: null,

  // A list of paths to directories that Jest should use to search for files in
  // roots: [
  //   "<rootDir>"
  // ],

  // Allows you to use a custom runner instead of Jest's default test runner
  // runner: "jest-runner",

  // The paths to modules that run some code to configure or set up the testing environment before each test
  // setupFiles: [],

  // A list of paths to modules that run some code to configure or set up the testing framework before each test
  // setupFilesAfterEnv: [],

  // A list of paths to snapshot serializer modules Jest should use for snapshot testing
  // snapshotSerializers: [],

  // The test environment that will be used for testing
  testEnvironment: "node",

  // Options that will be passed to the testEnvironment
  // testEnvironmentOptions: {},

  // Adds a location field to test results
  // testLocationInResults: false,

  // The glob patterns Jest uses to detect test files
  testMatch: [
    // "**/__tests__/**/*.[jt]s?(x)",
    "**/tests/**/*.[jt]s?(x)",
    // "**/?(*.)+(spec|test).[tj]s?(x)"
  ],

  // An array of regexp pattern strings that are matched against all test paths, matched tests are skipped
  // testPathIgnorePatterns: [
  //   "/node_modules/"
  // ],

  // The regexp pattern or array of patterns that Jest uses to detect test files
  // testRegex: [],

  // This option allows the use of a custom results processor
  // testResultsProcessor: null,

  // This option allows use of a custom test runner
  // testRunner: "jasmine2",

  // This option sets the URL for the jsdom environment. It is reflected in properties such as location.href
  // testURL: "http://localhost",

  // Setting this value to "fake" allows the use of fake timers for functions such as "setTimeout"
  // timers: "real",

  // A map from regular expressions to paths to transformers
  // transform: null,

  // An array of regexp pattern strings that are matched against all source file paths, matched files will skip transformation
  // transformIgnorePatterns: [
  //   "/node_modules/"
  // ],

  // An array of regexp pattern strings that are matched against all modules before the module loader will automatically return a mock for them
  // unmockedModulePathPatterns: undefined,

  // Indicates whether each individual test should be reported during the run
  // verbose: null,

  // An array of regexp patterns that are matched against all source file paths before re-running tests in watch mode
  // watchPathIgnorePatterns: [],

  // Whether to use watchman for file crawling
  // watchman: true,
};
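
For orientation, a minimal sketch of a test file this configuration would pick up (the path and test names are hypothetical; runnable with the jest ^24 devDependency added in package.json below):

// tests/unit/example.test.js — hypothetical path; matched by the
// "**/tests/**/*.[jt]s?(x)" pattern above and run in the "node" environment.
describe('example suite', () => {
    test('a jest.fn() records its calls', () => {
        const fn = jest.fn(x => x * 2);
        expect(fn(21)).toBe(42);
        expect(fn).toHaveBeenCalledTimes(1);
    });
});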

File diff suppressed because it is too large

package.json (31 changed lines)

@@ -56,6 +56,7 @@
     "ioredis": "2.4.0",
     "istanbul": "1.0.0-alpha.2",
     "istanbul-api": "1.0.0-alpha.13",
+    "jest": "^24.5.0",
     "lolex": "^1.4.0",
     "mocha": "^2.3.4",
     "mocha-junit-reporter": "1.11.1",
@@ -68,23 +69,23 @@
   },
   "scripts": {
     "cloudserver": "S3METADATA=mongodb npm-run-all --parallel start_dataserver start_s3server",
-    "ft_awssdk": "cd tests/functional/aws-node-sdk && mocha test/",
+    "ft_awssdk": "cd tests/functional/aws-node-sdk && jest test/",
     "ft_awssdk_aws": "cd tests/functional/aws-node-sdk && AWS_ON_AIR=true mocha test/",
-    "ft_awssdk_buckets": "cd tests/functional/aws-node-sdk && mocha test/bucket",
-    "ft_awssdk_objects_misc": "cd tests/functional/aws-node-sdk && mocha test/legacy test/object test/service test/support",
-    "ft_awssdk_versioning": "cd tests/functional/aws-node-sdk && mocha test/versioning/",
-    "ft_awssdk_external_backends": "cd tests/functional/aws-node-sdk && mocha test/multipleBackend",
+    "ft_awssdk_buckets": "cd tests/functional/aws-node-sdk && jest test/bucket",
+    "ft_awssdk_objects_misc": "cd tests/functional/aws-node-sdk && jest test/legacy test/object test/service test/support",
+    "ft_awssdk_versioning": "cd tests/functional/aws-node-sdk && jest test/versioning/",
+    "ft_awssdk_external_backends": "cd tests/functional/aws-node-sdk && jest test/multipleBackend",
     "ft_management": "cd tests/functional/report && npm test",
     "ft_node": "cd tests/functional/raw-node && npm test",
     "ft_node_routes": "cd tests/functional/raw-node && npm run test-routes",
     "ft_gcp": "cd tests/functional/raw-node && npm run test-gcp",
     "ft_healthchecks": "cd tests/functional/healthchecks && npm test",
-    "ft_s3cmd": "cd tests/functional/s3cmd && mocha -t 40000 *.js",
-    "ft_s3curl": "cd tests/functional/s3curl && mocha -t 40000 *.js",
-    "ft_util": "cd tests/functional/utilities && mocha -t 40000 *.js",
+    "ft_s3cmd": "cd tests/functional/s3cmd && jest -t 40000 *.js",
+    "ft_s3curl": "cd tests/functional/s3curl && jest -t 40000 *.js",
+    "ft_util": "cd tests/functional/utilities && jest -t 40000 *.js",
     "ft_test": "npm-run-all -s ft_awssdk ft_s3cmd ft_s3curl ft_node ft_healthchecks ft_management ft_util",
-    "ft_search": "cd tests/functional/aws-node-sdk && mocha -t 90000 test/mdSearch",
-    "install_ft_deps": "npm install aws-sdk@2.28.0 bluebird@3.3.1 mocha@2.3.4 mocha-junit-reporter@1.11.1 tv4@1.2.7",
+    "ft_search": "cd tests/functional/aws-node-sdk && jest -t 90000 test/mdSearch",
+    "install_ft_deps": "npm install aws-sdk@2.28.0 bluebird@3.3.1 mocha@2.3.4 mocha-junit-reporter@1.11.1 tv4@1.2.7 jest",
     "lint": "eslint $(git ls-files '*.js')",
     "lint_md": "mdlint $(git ls-files '*.md')",
     "mem_backend": "S3BACKEND=mem node index.js",
@@ -99,10 +100,10 @@
     "start_utapi": "node lib/utapi/utapi.js",
     "utapi_replay": "node lib/utapi/utapiReplay.js",
     "management_agent": "node managementAgent.js",
-    "test": "CI=true S3BACKEND=mem mocha --recursive tests/unit",
-    "test_legacy_location": "CI=true S3_LOCATION_FILE=tests/locationConfig/locationConfigLegacy.json S3BACKEND=mem mocha --recursive tests/unit",
-    "multiple_backend_test": "CI=true S3BACKEND=mem S3DATA=multiple mocha -t 20000 --recursive tests/multipleBackend",
-    "unit_coverage": "CI=true mkdir -p coverage/unit/ && S3BACKEND=mem MOCHA_FILE=$CIRCLE_TEST_REPORTS/unit/unit.xml istanbul cover --dir coverage/unit _mocha -- --reporter mocha-junit-reporter --recursive tests/unit",
-    "unit_coverage_legacy_location": "CI=true mkdir -p coverage/unitlegacylocation/ && S3_LOCATION_FILE=tests/locationConfig/locationConfigLegacy.json S3BACKEND=mem MOCHA_FILE=$CIRCLE_TEST_REPORTS/unit/unitlegacylocation.xml istanbul cover --dir coverage/unitlegacylocation _mocha -- --reporter mocha-junit-reporter --recursive tests/unit"
+    "test": "jest",
+    "test_legacy_location": "CI=true S3_LOCATION_FILE=tests/locationConfig/locationConfigLegacy.json S3BACKEND=mem jest tests/unit",
+    "multiple_backend_test": "CI=true S3BACKEND=mem S3DATA=multiple jest -t 20000 tests/multipleBackend",
+    "unit_coverage": "CI=true mkdir -p coverage/unit/ && S3BACKEND=mem MOCHA_FILE=$CIRCLE_TEST_REPORTS/unit/unit.xml istanbul cover --dir coverage/unit _mocha -- --reporter mocha-junit-reporter tests/unit",
+    "unit_coverage_legacy_location": "CI=true mkdir -p coverage/unitlegacylocation/ && S3_LOCATION_FILE=tests/locationConfig/locationConfigLegacy.json S3BACKEND=mem MOCHA_FILE=$CIRCLE_TEST_REPORTS/unit/unitlegacylocation.xml istanbul cover --dir coverage/unitlegacylocation _mocha -- --reporter mocha-junit-reporter tests/unit"
   }
 }

@@ -61,14 +61,12 @@ function methodRequest(params, callback) {
         if (code) {
             const message = Number.isNaN(parseInt(code, 10)) ?
                 `<Code>${code}</Code>` : '';
-            assert(total.indexOf(message) > -1, `Expected ${message}`);
-            assert.deepEqual(res.statusCode, statusCode[code],
-                `status code expected: ${statusCode[code]}`);
+            expect(total.indexOf(message) > -1).toBeTruthy();
+            expect(res.statusCode).toEqual(statusCode[code]);
         }
         if (headersResponse) {
             Object.keys(headersResponse).forEach(key => {
-                assert.deepEqual(res.headers[key], headersResponse[key],
-                    `error header: ${key}`);
+                expect(res.headers[key]).toEqual(headersResponse[key]);
             });
         } else {
             // if no headersResponse provided, should not have these headers
@@ -77,14 +75,12 @@ function methodRequest(params, callback) {
                 'access-control-allow-methods',
                 'access-control-allow-credentials',
                 'vary'].forEach(key => {
-                assert.strictEqual(res.headers[key], undefined,
-                    `Error: ${key} should not have value`);
+                expect(res.headers[key]).toBe(undefined);
             });
         }
         if (headersOmitted) {
             headersOmitted.forEach(key => {
-                assert.strictEqual(res.headers[key], undefined,
-                    `Error: ${key} should not have value`);
+                expect(res.headers[key]).toBe(undefined);
             });
         }
         return callback();

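The remaining file diffs apply the same mechanical assert-to-expect translation. As a reference for the mapping, a small self-contained sketch that runs under Jest (the values are arbitrary):

// Sketch of the assert -> expect equivalences applied throughout this diff.
const assert = require('assert');

test('assert and expect forms agree', () => {
    assert.strictEqual(2 + 2, 4);             // old: strict equality
    expect(2 + 2).toBe(4);                    // new
    assert.deepEqual({ n: 1 }, { n: 1 });     // old: deep equality
    expect({ n: 1 }).toEqual({ n: 1 });       // new
    assert('truthy value');                   // old: truthiness
    expect('truthy value').toBeTruthy();      // new
});
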
@@ -28,11 +28,11 @@ function checkOneVersion(s3, bucket, versionId, callback) {
         if (err) {
             callback(err);
         }
-        assert.strictEqual(data.Versions.length, 1);
+        expect(data.Versions.length).toBe(1);
         if (versionId) {
-            assert.strictEqual(data.Versions[0].VersionId, versionId);
+            expect(data.Versions[0].VersionId).toBe(versionId);
         }
-        assert.strictEqual(data.DeleteMarkers.length, 0);
+        expect(data.DeleteMarkers.length).toBe(0);
         callback();
     });
 }
@@ -100,9 +100,8 @@ function createDualNullVersion(s3, bucketName, keyName, cb) {
         // putting new version
         next => s3.putObject({ Bucket: bucketName, Key: keyName },
             (err, data) => {
-                assert.strictEqual(err, null,
-                    'Unexpected err putting new version');
-                assert(data.VersionId);
+                expect(err).toBe(null);
+                expect(data.VersionId).toBeTruthy();
                 next(null, data.VersionId);
             }),
         // delete version we just created, master version should be updated
@@ -115,9 +114,8 @@ function createDualNullVersion(s3, bucketName, keyName, cb) {
         // getting object should return null version now
         next => s3.getObject({ Bucket: bucketName, Key: keyName },
            (err, data) => {
-                assert.strictEqual(err, null,
-                    'Unexpected err getting latest version');
-                assert.strictEqual(data.VersionId, 'null');
+                expect(err).toBe(null);
+                expect(data.VersionId).toBe('null');
                 next();
             }),
     ], err => cb(err));

@@ -69,7 +69,7 @@ function _assertResponseHtml(response, elemtag, content) {
         const startingTag = '<ul>';
         const startIndex = response.indexOf(startingTag);
         const endIndex = response.indexOf('</ul>');
-        assert(startIndex > -1 && endIndex > -1, 'Did not find ul element');
+        expect(startIndex > -1 && endIndex > -1).toBeTruthy();
         const ulElem = response.slice(startIndex + startingTag.length,
             endIndex);
         content.forEach(item => {
@@ -77,36 +77,29 @@ function _assertResponseHtml(response, elemtag, content) {
         });
     } else {
         const elem = `<${elemtag}>${content}</${elemtag}>`;
-        assert(response.includes(elem),
-            `Expected but did not find '${elem}' in html`);
+        expect(response.includes(elem)).toBeTruthy();
     }
 }
 
 function _assertContainsHtml(responseBody) {
-    assert(responseBody.startsWith('<html>') &&
-        responseBody.includes('</html>'), 'Did not find html tags');
+    expect(responseBody.startsWith('<html>') &&
+        responseBody.includes('</html>')).toBeTruthy();
 }
 
 function _assertResponseHtml404(method, response, type) {
-    assert.strictEqual(response.statusCode, 404);
+    expect(response.statusCode).toBe(404);
     if (method === 'HEAD') {
         if (type === '404-no-such-bucket') {
-            assert.strictEqual(response.headers['x-amz-error-code'],
-                'NoSuchBucket');
+            expect(response.headers['x-amz-error-code']).toBe('NoSuchBucket');
             // Need arsenal fixed to remove period at the end
             // so compatible with aws
-            assert.strictEqual(response.headers['x-amz-error-message'],
-                'The specified bucket does not exist.');
+            expect(response.headers['x-amz-error-message']).toBe('The specified bucket does not exist.');
         } else if (type === '404-no-such-website-configuration') {
-            assert.strictEqual(response.headers['x-amz-error-code'],
-                'NoSuchWebsiteConfiguration');
-            assert.strictEqual(response.headers['x-amz-error-message'],
-                'The specified bucket does not have a website configuration');
+            expect(response.headers['x-amz-error-code']).toBe('NoSuchWebsiteConfiguration');
+            expect(response.headers['x-amz-error-message']).toBe('The specified bucket does not have a website configuration');
         } else if (type === '404-not-found') {
-            assert.strictEqual(response.headers['x-amz-error-code'],
-                'NoSuchKey');
-            assert.strictEqual(response.headers['x-amz-error-message'],
-                'The specified key does not exist.');
+            expect(response.headers['x-amz-error-code']).toBe('NoSuchKey');
+            expect(response.headers['x-amz-error-message']).toBe('The specified key does not exist.');
         } else {
             throw new Error(`'${type}' is not a recognized 404 ` +
                 'error checked in the WebsiteConfigTester.checkHTML function');
@@ -142,13 +135,11 @@ function _assertResponseHtml404(method, response, type) {
 }
 
 function _assertResponseHtml403(method, response, type) {
-    assert.strictEqual(response.statusCode, 403);
+    expect(response.statusCode).toBe(403);
     if (method === 'HEAD') {
         if (type === '403-access-denied') {
-            assert.strictEqual(response.headers['x-amz-error-code'],
-                'AccessDenied');
-            assert.strictEqual(response.headers['x-amz-error-message'],
-                'Access Denied');
+            expect(response.headers['x-amz-error-code']).toBe('AccessDenied');
+            expect(response.headers['x-amz-error-message']).toBe('Access Denied');
         } else if (type !== '403-retrieve-error-document') {
             throw new Error(`'${type}' is not a recognized 403 ` +
                 'error checked in the WebsiteConfigTester.checkHTML function');
@@ -183,9 +174,9 @@ function _assertResponseHtml403(method, response, type) {
 
 function _assertResponseHtmlErrorUser(response, type) {
     if (type === 'error-user') {
-        assert.strictEqual(response.statusCode, 403);
+        expect(response.statusCode).toBe(403);
     } else if (type === 'error-user-404') {
-        assert.strictEqual(response.statusCode, 404);
+        expect(response.statusCode).toBe(404);
     }
     _assertResponseHtml(response.body, 'title',
         'Error!!');
@@ -194,7 +185,7 @@ function _assertResponseHtmlErrorUser(response, type) {
 }
 
 function _assertResponseHtmlIndexUser(response) {
-    assert.strictEqual(response.statusCode, 200);
+    expect(response.statusCode).toBe(200);
     _assertResponseHtml(response.body, 'title',
         'Best testing website ever');
     _assertResponseHtml(response.body, 'h1', 'Welcome to my ' +
@@ -203,11 +194,11 @@ function _assertResponseHtmlIndexUser(response) {
 
 function _assertResponseHtmlRedirect(response, type, redirectUrl, method) {
     if (type === 'redirect' || type === 'redirect-user') {
-        assert.strictEqual(response.statusCode, 301);
-        assert.strictEqual(response.body, '');
-        assert.strictEqual(response.headers.location, redirectUrl);
+        expect(response.statusCode).toBe(301);
+        expect(response.body).toBe('');
+        expect(response.headers.location).toBe(redirectUrl);
     } else if (type === 'redirected-user') {
-        assert.strictEqual(response.statusCode, 200);
+        expect(response.statusCode).toBe(200);
         if (method === 'HEAD') {
             return;
             // no need to check HTML
@@ -270,7 +261,7 @@ class WebsiteConfigTester {
     static checkHTML(params, callback) {
         const { method, responseType, auth, url, redirectUrl } = params;
         _makeWebsiteRequest(auth, method, url, (err, res) => {
-            assert.strictEqual(err, null, `Unexpected request err ${err}`);
+            expect(err).toBe(null);
             if (responseType) {
                 if (responseType.startsWith('404')) {
                     _assertResponseHtml404(method, res, responseType);
@@ -315,11 +306,10 @@ class WebsiteConfigTester {
         _makeWebsiteRequest(auth, 'HEAD', url, (err, res) => {
             // body should be empty
             assert.deepStrictEqual(res.body, '');
-            assert.strictEqual(res.statusCode, expectedStatusCode);
+            expect(res.statusCode).toBe(expectedStatusCode);
             const headers = Object.keys(expectedHeaders);
             headers.forEach(header => {
-                assert.strictEqual(res.headers[header],
-                    expectedHeaders[header]);
+                expect(res.headers[header]).toBe(expectedHeaders[header]);
             });
             return cb();
         });

@@ -18,13 +18,10 @@ const basicRule = {
 // Check for the expected error response code and status code.
 function assertError(err, expectedErr, cb) {
     if (expectedErr === null) {
-        assert.strictEqual(err, null, `expected no error but got '${err}'`);
+        expect(err).toBe(null);
     } else {
-        assert.strictEqual(err.code, expectedErr, 'incorrect error response ' +
-            `code: should be '${expectedErr}' but got '${err.code}'`);
-        assert.strictEqual(err.statusCode, errors[expectedErr].code,
-            'incorrect error status code: should be 400 but got ' +
-            `'${err.statusCode}'`);
+        expect(err.code).toBe(expectedErr);
+        expect(err.statusCode).toBe(errors[expectedErr].code);
     }
     cb();
 }
@@ -33,14 +30,14 @@ describe('aws-sdk test delete bucket lifecycle', () => {
     let s3;
     let otherAccountS3;
 
-    before(done => {
+    beforeAll(done => {
         const config = getConfig('default', { signatureVersion: 'v4' });
         s3 = new S3(config);
         otherAccountS3 = new BucketUtility('lisa', {}).s3;
         return done();
     });
 
-    it('should return NoSuchBucket error if bucket does not exist', done => {
+    test('should return NoSuchBucket error if bucket does not exist', done => {
         s3.deleteBucketLifecycle({ Bucket: bucket }, err =>
             assertError(err, 'NoSuchBucket', done));
     });
@@ -50,23 +47,23 @@ describe('aws-sdk test delete bucket lifecycle', () => {
         afterEach(done => s3.deleteBucket({ Bucket: bucket }, done));
 
-        it('should return AccessDenied if user is not bucket owner', done => {
+        test('should return AccessDenied if user is not bucket owner', done => {
             otherAccountS3.deleteBucketLifecycle({ Bucket: bucket },
                 err => assertError(err, 'AccessDenied', done));
         });
 
-        it('should return no error if no lifecycle config on bucket', done => {
+        test('should return no error if no lifecycle config on bucket', done => {
             s3.deleteBucketLifecycle({ Bucket: bucket }, err =>
                 assertError(err, null, done));
         });
 
-        it('should delete lifecycle configuration from bucket', done => {
+        test('should delete lifecycle configuration from bucket', done => {
             const params = { Bucket: bucket,
                 LifecycleConfiguration: { Rules: [basicRule] } };
             s3.putBucketLifecycleConfiguration(params, err => {
-                assert.equal(err, null);
+                expect(err).toEqual(null);
                 s3.deleteBucketLifecycle({ Bucket: bucket }, err => {
-                    assert.equal(err, null);
+                    expect(err).toEqual(null);
                     s3.getBucketLifecycleConfiguration({ Bucket: bucket },
                         err =>
                         assertError(err, 'NoSuchLifecycleConfiguration', done));

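Besides the assertion rewrites, these suites rename Mocha's hooks to Jest's equivalents (before becomes beforeAll, after becomes afterAll, it becomes test, while beforeEach and afterEach keep their names). A minimal runnable sketch of the renames (the resource object is hypothetical):

// Hypothetical mini-suite showing the Mocha -> Jest lifecycle renames.
let resource;
beforeAll(done => {           // was: before(done => { ... })
    resource = { ready: true };
    done();
});
afterAll(() => {              // was: after(() => { ... })
    resource = null;
});
test('the suite-level setup ran once', () => {   // was: it(...)
    expect(resource.ready).toBe(true);
});
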
@@ -41,7 +41,7 @@ describe('aws-node-sdk test deleteBucketReplication', () => {
 
     function deleteReplicationAndCheckResponse(bucket, cb) {
         return s3.deleteBucketReplication({ Bucket: bucket }, (err, data) => {
-            assert.strictEqual(err, null);
+            expect(err).toBe(null);
             assert.deepStrictEqual(data, {});
             return cb();
         });
@@ -55,17 +55,17 @@ describe('aws-node-sdk test deleteBucketReplication', () => {
 
     afterEach(done => s3.deleteBucket({ Bucket: bucket }, done));
 
-    it('should return empty object if bucket has no replication config', done =>
+    test('should return empty object if bucket has no replication config', done =>
         deleteReplicationAndCheckResponse(bucket, done));
 
-    it('should delete a bucket replication config when it has one', done =>
+    test('should delete a bucket replication config when it has one', done =>
         series([
             next => putVersioningOnBucket(bucket, next),
             next => putReplicationOnBucket(bucket, next),
             next => deleteReplicationAndCheckResponse(bucket, next),
         ], done));
 
-    it('should return ReplicationConfigurationNotFoundError if getting ' +
+    test('should return ReplicationConfigurationNotFoundError if getting ' +
     'replication config after it has been deleted', done =>
         series([
             next => putVersioningOnBucket(bucket, next),
@@ -81,16 +81,16 @@ describe('aws-node-sdk test deleteBucketReplication', () => {
             }),
             next => deleteReplicationAndCheckResponse(bucket, next),
             next => s3.getBucketReplication({ Bucket: bucket }, err => {
-                assert(errors.ReplicationConfigurationNotFoundError[err.code]);
+                expect(errors.ReplicationConfigurationNotFoundError[err.code]).toBeTruthy();
                 return next();
             }),
         ], done));
 
-    it('should return AccessDenied if user is not bucket owner', done =>
+    test('should return AccessDenied if user is not bucket owner', done =>
         otherAccountS3.deleteBucketReplication({ Bucket: bucket }, err => {
-            assert(err);
-            assert.strictEqual(err.code, 'AccessDenied');
-            assert.strictEqual(err.statusCode, 403);
+            expect(err).toBeTruthy();
+            expect(err.code).toBe('AccessDenied');
+            expect(err.statusCode).toBe(403);
             return done();
         }));
 });

@@ -26,11 +26,11 @@ describe('DELETE bucket cors', () => {
     const otherAccountS3 = otherAccountBucketUtility.s3;
 
     describe('without existing bucket', () => {
-        it('should return NoSuchBucket', done => {
+        test('should return NoSuchBucket', done => {
            s3.deleteBucketCors({ Bucket: bucketName }, err => {
-                assert(err);
-                assert.strictEqual(err.code, 'NoSuchBucket');
-                assert.strictEqual(err.statusCode, 404);
+                expect(err).toBeTruthy();
+                expect(err.code).toBe('NoSuchBucket');
+                expect(err.statusCode).toBe(404);
                 return done();
             });
         });
@@ -41,14 +41,12 @@ describe('DELETE bucket cors', () => {
         afterEach(() => bucketUtil.deleteOne(bucketName));
 
         describe('without existing cors configuration', () => {
-            it('should return a 204 response', done => {
+            test('should return a 204 response', done => {
                 s3.deleteBucketCors({ Bucket: bucketName },
                 function deleteBucketCors(err) {
                     const statusCode = this.httpResponse.statusCode;
-                    assert.strictEqual(statusCode, 204,
-                        `Found unexpected statusCode ${statusCode}`);
-                    assert.strictEqual(err, null,
-                        `Found unexpected err ${err}`);
+                    expect(statusCode).toBe(204);
+                    expect(err).toBe(null);
                     return done();
                 });
             });
@@ -60,18 +58,15 @@ describe('DELETE bucket cors', () => {
                     CORSConfiguration: sampleCors }, done);
             });
 
-            it('should delete bucket configuration successfully', done => {
+            test('should delete bucket configuration successfully', done => {
                 s3.deleteBucketCors({ Bucket: bucketName },
                 function deleteBucketCors(err) {
                     const statusCode = this.httpResponse.statusCode;
-                    assert.strictEqual(statusCode, 204,
-                        `Found unexpected statusCode ${statusCode}`);
-                    assert.strictEqual(err, null,
-                        `Found unexpected err ${err}`);
+                    expect(statusCode).toBe(204);
+                    expect(err).toBe(null);
                     s3.getBucketCors({ Bucket: bucketName }, err => {
-                        assert.strictEqual(err.code,
-                            'NoSuchCORSConfiguration');
-                        assert.strictEqual(err.statusCode, 404);
+                        expect(err.code).toBe('NoSuchCORSConfiguration');
+                        expect(err.statusCode).toBe(404);
                         return done();
                     });
                 });
@@ -87,9 +82,9 @@ describe('DELETE bucket cors', () => {
             'owner', done => {
                 otherAccountS3.deleteBucketCors({ Bucket: bucketName },
                 err => {
-                    assert(err);
-                    assert.strictEqual(err.code, 'AccessDenied');
-                    assert.strictEqual(err.statusCode, 403);
+                    expect(err).toBeTruthy();
+                    expect(err.code).toBe('AccessDenied');
+                    expect(err.statusCode).toBe(403);
                     return done();
                 });
             });

@@ -14,11 +14,11 @@ describe('DELETE bucket website', () => {
     const otherAccountS3 = otherAccountBucketUtility.s3;
 
     describe('without existing bucket', () => {
-        it('should return NoSuchBucket', done => {
+        test('should return NoSuchBucket', done => {
             s3.deleteBucketWebsite({ Bucket: bucketName }, err => {
-                assert(err);
-                assert.strictEqual(err.code, 'NoSuchBucket');
-                assert.strictEqual(err.statusCode, 404);
+                expect(err).toBeTruthy();
+                expect(err.code).toBe('NoSuchBucket');
+                expect(err.statusCode).toBe(404);
                 return done();
             });
         });
@@ -29,15 +29,13 @@ describe('DELETE bucket website', () => {
         afterEach(() => bucketUtil.deleteOne(bucketName));
 
         describe('without existing configuration', () => {
-            it('should return a 204 response', done => {
+            test('should return a 204 response', done => {
                 const request =
                 s3.deleteBucketWebsite({ Bucket: bucketName }, err => {
                     const statusCode =
                         request.response.httpResponse.statusCode;
-                    assert.strictEqual(statusCode, 204,
-                        `Found unexpected statusCode ${statusCode}`);
-                    assert.strictEqual(err, null,
-                        `Found unexpected err ${err}`);
+                    expect(statusCode).toBe(204);
+                    expect(err).toBe(null);
                     return done();
                 });
             });
@@ -50,21 +48,19 @@ describe('DELETE bucket website', () => {
                     WebsiteConfiguration: config }, done);
             });
 
-            it('should delete bucket configuration successfully', done => {
+            test('should delete bucket configuration successfully', done => {
                 s3.deleteBucketWebsite({ Bucket: bucketName }, err => {
-                    assert.strictEqual(err, null,
-                        `Found unexpected err ${err}`);
+                    expect(err).toBe(null);
                     return done();
                 });
             });
 
-            it('should return AccessDenied if user is not bucket owner',
-            done => {
+            test('should return AccessDenied if user is not bucket owner', done => {
                 otherAccountS3.deleteBucketWebsite({ Bucket: bucketName },
                 err => {
-                    assert(err);
-                    assert.strictEqual(err.code, 'AccessDenied');
-                    assert.strictEqual(err.statusCode, 403);
+                    expect(err).toBeTruthy();
+                    expect(err.code).toBe('AccessDenied');
+                    expect(err.statusCode).toBe(403);
                     return done();
                 });
             });

@@ -26,14 +26,14 @@ const tests = [
             // ETag should include quotes around value
             const emptyObjectHash =
                 '"d41d8cd98f00b204e9800998ecf8427e"';
-            assert.equal(data.Name, Bucket, 'Bucket name mismatch');
-            assert.deepEqual(keys, [
+            expect(data.Name).toEqual(Bucket);
+            expect(keys).toEqual([
                 'testA/',
                 'testA/test.json',
                 'testA/test/test.json',
                 'testB/',
                 'testB/test.json',
-            ], 'Bucket content mismatch');
+            ]);
             assert.deepStrictEqual(data.Contents[0].ETag,
                 emptyObjectHash, 'Object hash mismatch');
         },
@@ -51,11 +51,11 @@ const tests = [
         listObjectParams: Bucket => ({ Bucket, Delimiter: '/' }),
         assertions: (data, Bucket) => {
             const prefixes = data.CommonPrefixes.map(cp => cp.Prefix);
-            assert.equal(data.Name, Bucket, 'Bucket name mismatch');
-            assert.deepEqual(prefixes, [
+            expect(data.Name).toEqual(Bucket);
+            expect(prefixes).toEqual([
                 'testA/',
                 'testB/',
-            ], 'Bucket content mismatch');
+            ]);
         },
     },
     {
@@ -69,11 +69,11 @@ const tests = [
         listObjectParams: Bucket => ({ Bucket, Delimiter: '%' }),
         assertions: data => {
             const prefixes = data.CommonPrefixes.map(cp => cp.Prefix);
-            assert.deepEqual(prefixes, [
+            expect(prefixes).toEqual([
                 'testA%',
                 'testB%',
                 'testC%',
-            ], 'Bucket content mismatch');
+            ]);
         },
     },
     {
@@ -90,8 +90,8 @@ const tests = [
         listObjectParams: Bucket => ({ Bucket }),
         assertions: (data, Bucket) => {
             const keys = data.Contents.map(object => object.Key);
-            assert.equal(data.Name, Bucket, 'Bucket name mismatch');
-            assert.deepEqual(keys, [
+            expect(data.Name).toEqual(Bucket);
+            expect(keys).toEqual([
                 /* These object names are intentionally listed in a
                 different order than they were created to additionally
                 test that they are listed alphabetically. */
@@ -101,7 +101,7 @@ const tests = [
                 'whiteSpace/',
                 'whiteSpace/one whiteSpace',
                 'whiteSpace/two white spaces',
-            ], 'Bucket content mismatch');
+            ]);
         },
     },
     {
@@ -202,8 +202,8 @@ const tests = [
         listObjectParams: Bucket => ({ Bucket }),
         assertions: (data, Bucket) => {
             const keys = data.Contents.map(object => object.Key);
-            assert.equal(data.Name, Bucket, 'Bucket name mismatch');
-            assert.deepEqual(keys, [
+            expect(data.Name).toEqual(Bucket);
+            expect(keys).toEqual([
                 /* These object names are intentionally listed in a
                 different order than they were created to additionally
                 test that they are listed alphabetically. */
@@ -265,7 +265,7 @@ const tests = [
                 '日japaneseMountainObjTitle',
                 '日japaneseMountainObjTitle/objTitleA',
                 '日japaneseMountainObjTitle/日japaneseMountainObjTitle',
-            ], 'Bucket content mismatch');
+            ]);
         },
     },
     {
@@ -291,7 +291,7 @@ describe('GET Bucket - AWS.S3.listObjects', () => {
     let bucketUtil;
     let bucketName;
 
-    before(done => {
+    beforeAll(done => {
         bucketUtil = new BucketUtility();
         bucketUtil.createRandom(1)
             .then(created => {
@@ -301,19 +301,19 @@ describe('GET Bucket - AWS.S3.listObjects', () => {
             .catch(done);
     });
 
-    after(done => {
+    afterAll(done => {
         bucketUtil.deleteOne(bucketName)
             .then(() => done())
             .catch(done);
     });
 
-    it('should return 403 and AccessDenied on a private bucket', done => {
+    test('should return 403 and AccessDenied on a private bucket', done => {
         const params = { Bucket: bucketName };
         bucketUtil.s3
             .makeUnauthenticatedRequest('listObjects', params, error => {
-                assert(error);
-                assert.strictEqual(error.statusCode, 403);
-                assert.strictEqual(error.code, 'AccessDenied');
+                expect(error).toBeTruthy();
+                expect(error.statusCode).toBe(403);
+                expect(error.code).toBe('AccessDenied');
                 done();
             });
     });
@@ -323,7 +323,7 @@ describe('GET Bucket - AWS.S3.listObjects', () => {
     let bucketUtil;
     let bucketName;
 
-    before(done => {
+    beforeAll(done => {
         bucketUtil = new BucketUtility('default', sigCfg);
         bucketUtil.createRandom(1)
             .then(created => {
@@ -333,7 +333,7 @@ describe('GET Bucket - AWS.S3.listObjects', () => {
             .catch(done);
     });
 
-    after(done => {
+    afterAll(done => {
         bucketUtil.deleteOne(bucketName).then(() => done()).catch(done);
     });
 
@@ -342,7 +342,7 @@ describe('GET Bucket - AWS.S3.listObjects', () => {
     });
 
     tests.forEach(test => {
-        it(`should ${test.name}`, done => {
+        test(`should ${test.name}`, done => {
             const s3 = bucketUtil.s3;
             const Bucket = bucketName;
 
@@ -367,7 +367,7 @@ describe('GET Bucket - AWS.S3.listObjects', () => {
     });
 
     tests.forEach(test => {
-        it(`v2 should ${test.name}`, done => {
+        test(`v2 should ${test.name}`, done => {
             const s3 = bucketUtil.s3;
             const Bucket = bucketName;
 
@@ -392,7 +392,7 @@ describe('GET Bucket - AWS.S3.listObjects', () => {
     });
 
     ['&', '"quot', '\'apos', '<lt', '>gt'].forEach(k => {
-        it(`should list objects with key ${k} as Prefix`, done => {
+        test(`should list objects with key ${k} as Prefix`, done => {
             const s3 = bucketUtil.s3;
             const Bucket = bucketName;
             const objects = [{ Bucket, Key: k }];
@@ -416,7 +416,7 @@ describe('GET Bucket - AWS.S3.listObjects', () => {
     });
 
     ['&', '"quot', '\'apos', '<lt', '>gt'].forEach(k => {
-        it(`should list objects with key ${k} as Marker`, done => {
+        test(`should list objects with key ${k} as Marker`, done => {
             const s3 = bucketUtil.s3;
             const Bucket = bucketName;
             const objects = [{ Bucket, Key: k }];
@@ -440,7 +440,7 @@ describe('GET Bucket - AWS.S3.listObjects', () => {
     });
 
     ['&', '"quot', '\'apos', '<lt', '>gt'].forEach(k => {
-        it(`should list objects with key ${k} as NextMarker`, done => {
+        test(`should list objects with key ${k} as NextMarker`, done => {
             const s3 = bucketUtil.s3;
             const Bucket = bucketName;
             const objects = [{ Bucket, Key: k }, { Bucket, Key: 'zzz' }];
@@ -457,7 +457,7 @@ describe('GET Bucket - AWS.S3.listObjects', () => {
             }
             return data;
         }).then(data => {
-            assert.strictEqual(data.NextMarker, k);
+            expect(data.NextMarker).toBe(k);
             done();
         })
         .catch(done);
@@ -465,7 +465,7 @@ describe('GET Bucket - AWS.S3.listObjects', () => {
     });
 
     ['&', '"quot', '\'apos', '<lt', '>gt'].forEach(k => {
-        it(`should list objects with key ${k} as StartAfter`, done => {
+        test(`should list objects with key ${k} as StartAfter`, done => {
             const s3 = bucketUtil.s3;
             const Bucket = bucketName;
             const objects = [{ Bucket, Key: k }];
@@ -490,8 +490,7 @@ describe('GET Bucket - AWS.S3.listObjects', () => {
     });
 
     ['&', '"quot', '\'apos', '<lt', '>gt'].forEach(k => {
-        it(`should list objects with key ${k} as ContinuationToken`,
-        done => {
+        test(`should list objects with key ${k} as ContinuationToken`, done => {
             const s3 = bucketUtil.s3;
             const Bucket = bucketName;
             const objects = [{ Bucket, Key: k }];
@@ -517,8 +516,7 @@ describe('GET Bucket - AWS.S3.listObjects', () => {
     });
 
     ['&', '"quot', '\'apos', '<lt', '>gt'].forEach(k => {
-        it(`should list objects with key ${k} as NextContinuationToken`,
-        done => {
+        test(`should list objects with key ${k} as NextContinuationToken`, done => {
            const s3 = bucketUtil.s3;
            const Bucket = bucketName;
            const objects = [{ Bucket, Key: k }, { Bucket, Key: 'zzz' }];
@@ -534,8 +532,7 @@ describe('GET Bucket - AWS.S3.listObjects', () => {
             }
             return data;
         }).then(data => {
-            assert.strictEqual(
-                decryptToken(data.NextContinuationToken), k);
+            expect(decryptToken(data.NextContinuationToken)).toBe(k);
             done();
         })
         .catch(done);

@ -10,13 +10,10 @@ const bucket = 'lifecycletestbucket';
|
||||||
// Check for the expected error response code and status code.
|
// Check for the expected error response code and status code.
|
||||||
function assertError(err, expectedErr, cb) {
|
function assertError(err, expectedErr, cb) {
|
||||||
if (expectedErr === null) {
|
if (expectedErr === null) {
|
||||||
assert.strictEqual(err, null, `expected no error but got '${err}'`);
|
expect(err).toBe(null);
|
||||||
} else {
|
} else {
|
||||||
assert.strictEqual(err.code, expectedErr, 'incorrect error response ' +
|
expect(err.code).toBe(expectedErr);
|
||||||
`code: should be '${expectedErr}' but got '${err.code}'`);
|
expect(err.statusCode).toBe(errors[expectedErr].code);
|
||||||
assert.strictEqual(err.statusCode, errors[expectedErr].code,
|
|
||||||
'incorrect error status code: should be 400 but got ' +
|
|
||||||
`'${err.statusCode}'`);
|
|
||||||
}
|
}
|
||||||
cb();
|
cb();
|
||||||
}
|
}
|
||||||
|
@ -25,14 +22,14 @@ describe('aws-sdk test get bucket lifecycle', () => {
|
||||||
let s3;
|
let s3;
|
||||||
let otherAccountS3;
|
let otherAccountS3;
|
||||||
|
|
||||||
before(done => {
|
beforeAll(done => {
|
||||||
const config = getConfig('default', { signatureVersion: 'v4' });
|
const config = getConfig('default', { signatureVersion: 'v4' });
|
||||||
s3 = new S3(config);
|
s3 = new S3(config);
|
||||||
otherAccountS3 = new BucketUtility('lisa', {}).s3;
|
otherAccountS3 = new BucketUtility('lisa', {}).s3;
|
||||||
return done();
|
return done();
|
||||||
});
|
});
|
||||||
|
|
||||||
it('should return NoSuchBucket error if bucket does not exist', done => {
|
test('should return NoSuchBucket error if bucket does not exist', done => {
|
||||||
s3.getBucketLifecycleConfiguration({ Bucket: bucket }, err =>
|
s3.getBucketLifecycleConfiguration({ Bucket: bucket }, err =>
|
||||||
assertError(err, 'NoSuchBucket', done));
|
assertError(err, 'NoSuchBucket', done));
|
||||||
});
|
});
|
||||||
|
@ -42,19 +39,19 @@ describe('aws-sdk test get bucket lifecycle', () => {
|
||||||
|
|
||||||
afterEach(done => s3.deleteBucket({ Bucket: bucket }, done));
|
afterEach(done => s3.deleteBucket({ Bucket: bucket }, done));
|
||||||
|
|
||||||
it('should return AccessDenied if user is not bucket owner', done => {
|
test('should return AccessDenied if user is not bucket owner', done => {
|
||||||
otherAccountS3.getBucketLifecycleConfiguration({ Bucket: bucket },
|
otherAccountS3.getBucketLifecycleConfiguration({ Bucket: bucket },
|
||||||
err => assertError(err, 'AccessDenied', done));
|
err => assertError(err, 'AccessDenied', done));
|
||||||
});
|
});
|
||||||
|
|
||||||
it('should return NoSuchLifecycleConfiguration error if no lifecycle ' +
|
test('should return NoSuchLifecycleConfiguration error if no lifecycle ' +
|
||||||
'put to bucket', done => {
|
'put to bucket', done => {
|
||||||
s3.getBucketLifecycleConfiguration({ Bucket: bucket }, err => {
|
s3.getBucketLifecycleConfiguration({ Bucket: bucket }, err => {
|
||||||
assertError(err, 'NoSuchLifecycleConfiguration', done);
|
assertError(err, 'NoSuchLifecycleConfiguration', done);
|
||||||
});
|
});
|
||||||
});
|
});
|
||||||
|
|
||||||
it('should get bucket lifecycle config with top-level prefix', done =>
|
test('should get bucket lifecycle config with top-level prefix', done =>
|
||||||
s3.putBucketLifecycleConfiguration({
|
s3.putBucketLifecycleConfiguration({
|
||||||
Bucket: bucket,
|
Bucket: bucket,
|
||||||
LifecycleConfiguration: {
|
LifecycleConfiguration: {
|
||||||
|
@ -66,12 +63,11 @@ describe('aws-sdk test get bucket lifecycle', () => {
|
||||||
}],
|
}],
|
||||||
},
|
},
|
||||||
}, err => {
|
}, err => {
|
||||||
assert.equal(err, null, `Err putting lifecycle config: ${err}`);
|
expect(err).toEqual(null);
|
||||||
s3.getBucketLifecycleConfiguration({ Bucket: bucket },
|
s3.getBucketLifecycleConfiguration({ Bucket: bucket },
|
||||||
(err, res) => {
|
(err, res) => {
|
||||||
assert.equal(err, null, 'Error getting lifecycle config: ' +
|
expect(err).toEqual(null);
|
||||||
`${err}`);
|
expect(res.Rules.length).toBe(1);
|
||||||
assert.strictEqual(res.Rules.length, 1);
|
|
||||||
assert.deepStrictEqual(res.Rules[0], {
|
assert.deepStrictEqual(res.Rules[0], {
|
||||||
Expiration: { Days: 1 },
|
Expiration: { Days: 1 },
|
||||||
ID: 'test-id',
|
ID: 'test-id',
|
||||||
|
@ -84,7 +80,7 @@ describe('aws-sdk test get bucket lifecycle', () => {
|
||||||
});
|
});
|
||||||
}));
|
}));
|
||||||
|
|
||||||
it('should get bucket lifecycle config with filter prefix', done =>
|
test('should get bucket lifecycle config with filter prefix', done =>
|
||||||
s3.putBucketLifecycleConfiguration({
|
s3.putBucketLifecycleConfiguration({
|
||||||
Bucket: bucket,
|
Bucket: bucket,
|
||||||
LifecycleConfiguration: {
|
LifecycleConfiguration: {
|
||||||
|
@ -96,12 +92,11 @@ describe('aws-sdk test get bucket lifecycle', () => {
|
||||||
}],
|
}],
|
||||||
},
|
},
|
||||||
}, err => {
|
}, err => {
|
||||||
assert.equal(err, null, `Err putting lifecycle config: ${err}`);
|
expect(err).toEqual(null);
|
||||||
s3.getBucketLifecycleConfiguration({ Bucket: bucket },
|
s3.getBucketLifecycleConfiguration({ Bucket: bucket },
|
||||||
(err, res) => {
|
(err, res) => {
|
||||||
assert.equal(err, null, 'Error getting lifecycle config: ' +
|
expect(err).toEqual(null);
|
||||||
`${err}`);
|
expect(res.Rules.length).toBe(1);
|
||||||
assert.strictEqual(res.Rules.length, 1);
|
|
||||||
assert.deepStrictEqual(res.Rules[0], {
|
assert.deepStrictEqual(res.Rules[0], {
|
||||||
Expiration: { Days: 1 },
|
Expiration: { Days: 1 },
|
||||||
ID: 'test-id',
|
ID: 'test-id',
|
||||||
|
@ -114,8 +109,7 @@ describe('aws-sdk test get bucket lifecycle', () => {
|
||||||
});
|
});
|
||||||
}));
|
}));
|
||||||
|
|
||||||
it('should get bucket lifecycle config with filter prefix and tags',
|
test('should get bucket lifecycle config with filter prefix and tags', done =>
|
||||||
done =>
|
|
||||||
s3.putBucketLifecycleConfiguration({
|
s3.putBucketLifecycleConfiguration({
|
||||||
Bucket: bucket,
|
Bucket: bucket,
|
||||||
LifecycleConfiguration: {
|
LifecycleConfiguration: {
|
||||||
|
@ -137,12 +131,11 @@ describe('aws-sdk test get bucket lifecycle', () => {
|
||||||
}],
|
}],
|
||||||
},
|
},
|
||||||
}, err => {
|
}, err => {
|
||||||
assert.equal(err, null, `Err putting lifecycle config: ${err}`);
|
expect(err).toEqual(null);
|
||||||
s3.getBucketLifecycleConfiguration({ Bucket: bucket },
|
s3.getBucketLifecycleConfiguration({ Bucket: bucket },
|
||||||
(err, res) => {
|
(err, res) => {
|
||||||
assert.equal(err, null, 'Error getting lifecycle config: ' +
|
expect(err).toEqual(null);
|
||||||
`${err}`);
|
expect(res.Rules.length).toBe(1);
|
||||||
assert.strictEqual(res.Rules.length, 1);
|
|
||||||
assert.deepStrictEqual(res.Rules[0], {
|
assert.deepStrictEqual(res.Rules[0], {
|
||||||
Expiration: { Days: 1 },
|
Expiration: { Days: 1 },
|
||||||
ID: 'test-id',
|
ID: 'test-id',
|
||||||
|
|
|
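
Every hunk above applies the same mechanical substitutions, so the rest of this compare is easiest to read with the mapping in mind. A minimal runnable sketch of the equivalences (the values are invented so the example passes; it is not part of the changeset):

    // Each commented line is the removed Mocha/assert form; the line after
    // it is the Jest form this compare adds in its place.
    test('assert-to-expect mapping used throughout this compare', () => {
        const err = null;
        const res = { Rules: [{}] };
        // assert.strictEqual(err, null, `expected no error but got '${err}'`);
        expect(err).toBe(null);
        // assert.equal(err, null, `Err putting lifecycle config: ${err}`);
        expect(err).toEqual(null);
        // assert.strictEqual(res.Rules.length, 1);
        expect(res.Rules.length).toBe(1);
    });
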
@@ -42,14 +42,15 @@ describe('aws-node-sdk test getBucketReplication', () => {

     afterEach(done => s3.deleteBucket({ Bucket: bucket }, done));

-    it("should return 'ReplicationConfigurationNotFoundError' if bucket does " +
+    test("should return 'ReplicationConfigurationNotFoundError' if bucket does " +
         'not have a replication configuration', done =>
         s3.getBucketReplication({ Bucket: bucket }, err => {
-            assert(errors.ReplicationConfigurationNotFoundError[err.code]);
+            expect(errors.ReplicationConfigurationNotFoundError[err.code]).toBeTruthy();
             return done();
         }));

-    it('should get the replication configuration that was put on a bucket',
+    test(
+        'should get the replication configuration that was put on a bucket',
         done => s3.putBucketReplication({
             Bucket: bucket,
             ReplicationConfiguration: replicationConfig,
@@ -67,13 +68,14 @@ describe('aws-node-sdk test getBucketReplication', () => {
             assert.deepStrictEqual(data, expectedObj);
             return done();
         });
-    }));
+    })
+    );

-    it('should return AccessDenied if user is not bucket owner', done =>
+    test('should return AccessDenied if user is not bucket owner', done =>
         otherAccountS3.getBucketReplication({ Bucket: bucket }, err => {
-            assert(err);
-            assert.strictEqual(err.code, 'AccessDenied');
-            assert.strictEqual(err.statusCode, 403);
+            expect(err).toBeTruthy();
+            expect(err.code).toBe('AccessDenied');
+            expect(err.statusCode).toBe(403);
             return done();
         }));
 });
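
The lifecycle-hook renames in this file follow the same one-to-one table used everywhere else in the compare. A sketch (Jest's globals need no imports):

    // Mocha            ->  Jest (as used in these diffs)
    // before(fn)       ->  beforeAll(fn)
    // after(fn)        ->  afterAll(fn)
    // beforeEach / afterEach keep their names
    // it(title, fn)    ->  test(title, fn)
    beforeAll(() => { /* runs once before all tests in the file */ });
    afterAll(() => { /* runs once after all tests in the file */ });
    test('placeholder showing the renamed globals', () => {
        expect(typeof beforeAll).toBe('function');
    });
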
@@ -25,17 +25,16 @@ describe('GET bucket cors', () => {
               AllowedHeaders: ['*'],
               MaxAgeSeconds: 3000 },
         ] };
-        before(() =>
+        beforeAll(() =>
             s3.createBucketAsync({ Bucket: bucketName })
             .then(() => s3.putBucketCorsAsync({
                 Bucket: bucketName,
                 CORSConfiguration: sampleCors,
             })));

-        it('should return cors configuration successfully', done => {
+        test('should return cors configuration successfully', done => {
             s3.getBucketCors({ Bucket: bucketName }, (err, data) => {
-                assert.strictEqual(err, null,
-                    `Found unexpected err ${err}`);
+                expect(err).toBe(null);
                 assert.deepStrictEqual(data.CORSRules,
                     sampleCors.CORSRules);
                 return done();
@@ -50,18 +49,16 @@ describe('GET bucket cors', () => {
               AllowedOrigins: ['http://www.example.com'],
               AllowedHeaders: [testValue] },
         ] };
-        before(() =>
+        beforeAll(() =>
             s3.createBucketAsync({ Bucket: bucketName })
             .then(() => s3.putBucketCorsAsync({
                 Bucket: bucketName,
                 CORSConfiguration: sampleCors,
             })));

-        it('should be preserved when putting / getting cors resource',
-        done => {
+        test('should be preserved when putting / getting cors resource', done => {
             s3.getBucketCors({ Bucket: bucketName }, (err, data) => {
-                assert.strictEqual(err, null,
-                    `Found unexpected err ${err}`);
+                expect(err).toBe(null);
                 assert.deepStrictEqual(data.CORSRules[0].AllowedHeaders,
                     sampleCors.CORSRules[0].AllowedHeaders);
                 return done();
@@ -74,18 +71,16 @@ describe('GET bucket cors', () => {
             { AllowedMethods: ['PUT', 'POST', 'DELETE'],
               AllowedOrigins: ['http://www.example.com'] },
         ] };
-        before(() =>
+        beforeAll(() =>
             s3.createBucketAsync({ Bucket: bucketName })
             .then(() => s3.putBucketCorsAsync({
                 Bucket: bucketName,
                 CORSConfiguration: sampleCors,
             })));

-        it('should be preserved when retrieving cors resource',
-        done => {
+        test('should be preserved when retrieving cors resource', done => {
             s3.getBucketCors({ Bucket: bucketName }, (err, data) => {
-                assert.strictEqual(err, null,
-                    `Found unexpected err ${err}`);
+                expect(err).toBe(null);
                 assert.deepStrictEqual(data.CORSRules[0].AllowedMethods,
                     sampleCors.CORSRules[0].AllowedMethods);
                 return done();
@@ -94,7 +89,7 @@ describe('GET bucket cors', () => {
         });

     describe('on bucket without cors configuration', () => {
-        before(done => {
+        beforeAll(done => {
             process.stdout.write('about to create bucket\n');
             s3.createBucket({ Bucket: bucketName }, err => {
                 if (err) {
@@ -105,11 +100,11 @@ describe('GET bucket cors', () => {
             });
         });

-        it('should return NoSuchCORSConfiguration', done => {
+        test('should return NoSuchCORSConfiguration', done => {
             s3.getBucketCors({ Bucket: bucketName }, err => {
-                assert(err);
-                assert.strictEqual(err.code, 'NoSuchCORSConfiguration');
-                assert.strictEqual(err.statusCode, 404);
+                expect(err).toBeTruthy();
+                expect(err.code).toBe('NoSuchCORSConfiguration');
+                expect(err.statusCode).toBe(404);
                 return done();
             });
         });
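
All of the converted tests keep their callback style: Jest, like Mocha, waits until `done()` is called and fails the test if `done` receives an argument. A self-contained sketch, with a hypothetical `fakeGetCors` standing in for the s3 calls above:

    // fakeGetCors is illustrative only; it mimics the (err, data) callback
    // shape of s3.getBucketCors.
    function fakeGetCors(cb) {
        setImmediate(() => cb(null, { CORSRules: [] }));
    }

    test('callback-style test carried over from Mocha', done => {
        fakeGetCors((err, data) => {
            expect(err).toBe(null);
            expect(data.CORSRules).toEqual([]);
            done(); // tells Jest the async work finished
        });
    });
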
@@ -23,22 +23,20 @@ describeSkipAWS('GET bucket location ', () => {
             return;
         }
         describe(`with location: ${location}`, () => {
-            before(done => s3.createBucketAsync(
+            beforeAll(done => s3.createBucketAsync(
                 {
                     Bucket: bucketName,
                     CreateBucketConfiguration: {
                         LocationConstraint: location,
                     },
                 }, done));
-            after(() => bucketUtil.deleteOne(bucketName));
+            afterAll(() => bucketUtil.deleteOne(bucketName));

-            it(`should return location configuration: ${location} ` +
-            'successfully',
-            done => {
+            test(`should return location configuration: ${location} ` +
+            'successfully', done => {
                 s3.getBucketLocation({ Bucket: bucketName },
                 (err, data) => {
-                    assert.strictEqual(err, null,
-                        `Found unexpected err ${err}`);
+                    expect(err).toBe(null);
                     assert.deepStrictEqual(data.LocationConstraint,
                         location);
                     return done();
@@ -48,7 +46,7 @@ describeSkipAWS('GET bucket location ', () => {
         });

     describe('with location us-east-1', () => {
-        before(done => s3.createBucketAsync(
+        beforeAll(done => s3.createBucketAsync(
             {
                 Bucket: bucketName,
                 CreateBucketConfiguration: {
@@ -56,12 +54,10 @@ describeSkipAWS('GET bucket location ', () => {
                 },
             }, done));
         afterEach(() => bucketUtil.deleteOne(bucketName));
-        it('should return empty location',
-        done => {
+        test('should return empty location', done => {
             s3.getBucketLocation({ Bucket: bucketName },
             (err, data) => {
-                assert.strictEqual(err, null,
-                    `Found unexpected err ${err}`);
+                expect(err).toBe(null);
                 assert.deepStrictEqual(data.LocationConstraint, '');
                 return done();
             });
@@ -69,7 +65,7 @@ describeSkipAWS('GET bucket location ', () => {
         });

     describe('without location configuration', () => {
-        after(() => {
+        afterAll(() => {
             process.stdout.write('Deleting bucket\n');
             return bucketUtil.deleteOne(bucketName)
             .catch(err => {
@@ -78,15 +74,14 @@ describeSkipAWS('GET bucket location ', () => {
             });
         });

-        it('should return request endpoint as location', done => {
+        test('should return request endpoint as location', done => {
             process.stdout.write('Creating bucket');
             const request = s3.createBucket({ Bucket: bucketName });
             request.on('build', () => {
                 request.httpRequest.body = '';
             });
             request.send(err => {
-                assert.strictEqual(err, null, 'Error creating bucket: ' +
-                    `${err}`);
+                expect(err).toBe(null);
                 const host = request.service.endpoint.hostname;
                 let endpoint = config.restEndpoints[host];
                 // s3 actually returns '' for us-east-1
@@ -95,9 +90,8 @@ describeSkipAWS('GET bucket location ', () => {
                 }
                 s3.getBucketLocation({ Bucket: bucketName },
                 (err, data) => {
-                    assert.strictEqual(err, null, 'Expected succes, ' +
-                        `got error ${JSON.stringify(err)}`);
-                    assert.strictEqual(data.LocationConstraint, endpoint);
+                    expect(err).toBe(null);
+                    expect(data.LocationConstraint).toBe(endpoint);
                     done();
                 });
             });
@@ -105,22 +99,21 @@ describeSkipAWS('GET bucket location ', () => {
         });

     describe('with location configuration', () => {
-        before(done => s3.createBucketAsync(
+        beforeAll(done => s3.createBucketAsync(
             {
                 Bucket: bucketName,
                 CreateBucketConfiguration: {
                     LocationConstraint: 'us-east-1',
                 },
             }, done));
-        after(() => bucketUtil.deleteOne(bucketName));
+        afterAll(() => bucketUtil.deleteOne(bucketName));

-        it('should return AccessDenied if user is not bucket owner',
-        done => {
+        test('should return AccessDenied if user is not bucket owner', done => {
             otherAccountS3.getBucketLocation({ Bucket: bucketName },
             err => {
-                assert(err);
-                assert.strictEqual(err.code, 'AccessDenied');
-                assert.strictEqual(err.statusCode, 403);
+                expect(err).toBeTruthy();
+                expect(err.code).toBe('AccessDenied');
+                expect(err.statusCode).toBe(403);
                 return done();
             });
         });
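
Both hook styles seen above survive the migration: hooks may take a `done` callback (`beforeAll(done => ...)`) or return a promise that the runner awaits (`afterAll(() => bucketUtil.deleteOne(...))`). A sketch with hypothetical `setup`/`teardown` promises standing in for the bucket helpers:

    // setup/teardown are stand-ins for createBucketAsync/deleteOne.
    const setup = () => Promise.resolve();
    const teardown = () => Promise.resolve();

    beforeAll(() => setup());      // Jest awaits the returned promise
    afterAll(() => teardown());

    test('runs only after setup has resolved', () => {
        expect(true).toBe(true);
    });
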
@@ -30,17 +30,16 @@ describe('GET bucket website', () => {
         afterEach(() => bucketUtil.deleteOne(bucketName));

     describe('with existing bucket configuration', () => {
-        before(() =>
+        beforeAll(() =>
             s3.createBucketAsync({ Bucket: bucketName })
             .then(() => s3.putBucketWebsiteAsync({
                 Bucket: bucketName,
                 WebsiteConfiguration: config,
             })));

-        it('should return bucket website xml successfully', done => {
+        test('should return bucket website xml successfully', done => {
             s3.getBucketWebsite({ Bucket: bucketName }, (err, data) => {
-                assert.strictEqual(err, null,
-                    `Found unexpected err ${err}`);
+                expect(err).toBe(null);
                 const configObject = Object.assign({}, config);
                 assert.deepStrictEqual(data, configObject);
                 return done();
@@ -49,7 +48,7 @@ describe('GET bucket website', () => {
         });

     describe('on bucket without website configuration', () => {
-        before(done => {
+        beforeAll(done => {
             process.stdout.write('about to create bucket\n');
             s3.createBucket({ Bucket: bucketName }, err => {
                 if (err) {
@@ -60,11 +59,11 @@ describe('GET bucket website', () => {
             });
         });

-        it('should return NoSuchWebsiteConfiguration', done => {
+        test('should return NoSuchWebsiteConfiguration', done => {
             s3.getBucketWebsite({ Bucket: bucketName }, err => {
-                assert(err);
-                assert.strictEqual(err.code, 'NoSuchWebsiteConfiguration');
-                assert.strictEqual(err.statusCode, 404);
+                expect(err).toBeTruthy();
+                expect(err.code).toBe('NoSuchWebsiteConfiguration');
+                expect(err.statusCode).toBe(404);
                 return done();
             });
         });
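
A side effect visible in every deleted `Found unexpected err ${err}` line: `expect(err).toBe(null)` takes no message argument, so the custom context strings are simply dropped and failures rely on Jest's own diff output. If the label matters, one workaround (an assumption on my part, not something this compare does) is to assert on a labeled wrapper object:

    test('keeping a label without assert messages (sketch)', () => {
        const err = null; // stand-in for the callback error
        // On failure the key 'getBucketWebsite err' shows up in Jest's diff.
        expect({ 'getBucketWebsite err': err })
            .toEqual({ 'getBucketWebsite err': null });
    });
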
@@ -9,20 +9,21 @@ describe('HEAD bucket', () => {
     let bucketUtil;
     let s3;

-    before(() => {
+    beforeAll(() => {
         bucketUtil = new BucketUtility('default', sigCfg);
         s3 = bucketUtil.s3;
     });


-    it('should return an error to a head request without a bucket name',
+    test(
+        'should return an error to a head request without a bucket name',
         done => {
             s3.headBucket({ Bucket: '' }, err => {
-                assert.notEqual(err, null,
-                    'Expected failure but got success');
-                assert.strictEqual(err.code, 405);
+                expect(err).not.toEqual(null);
+                expect(err.code).toBe(405);
                 done();
             });
-        });
+        }
+    );
     });
 });
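
When a converted call no longer fits the old line layout, the codemod reflows the arguments, as in the `test(` / title / `);` split above; the call is semantically unchanged, and concatenated title strings stay ordinary expressions. A layout-only sketch:

    test(
        'should return an error to a head request ' +
            'without a bucket name (layout sketch only)',
        done => {
            expect(1).toBe(1);
            done();
        }
    );
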
@@ -48,7 +48,7 @@ const objects = [

 describe('Listing corner cases tests', () => {
     let s3;
-    before(done => {
+    beforeAll(done => {
         const config = getConfig('default', { signatureVersion: 'v4' });
         s3 = new AWS.S3(config);
         s3.createBucket(
@@ -64,7 +64,7 @@ describe('Listing corner cases tests', () => {
             }, done);
         });
     });
-    after(done => {
+    afterAll(done => {
         s3.listObjects({ Bucket }, (err, data) => {
             async.each(data.Contents, (o, next) => {
                 s3.deleteObject({ Bucket, Key: o.Key }, next);
@@ -73,9 +73,9 @@ describe('Listing corner cases tests', () => {
             });
         });
     });
-    it('should list everything', done => {
+    test('should list everything', done => {
        s3.listObjects({ Bucket }, (err, data) => {
-            assert.strictEqual(err, null);
+            expect(err).toBe(null);
            cutAttributes(data);
            assert.deepStrictEqual(data, {
                IsTruncated: false,
@@ -100,14 +100,14 @@ describe('Listing corner cases tests', () => {
             done();
         });
     });
-    it('should list with valid marker', done => {
+    test('should list with valid marker', done => {
         s3.listObjects(
             { Bucket,
               Delimiter: '/',
               Marker: 'notes/summer/1.txt',
             },
             (err, data) => {
-                assert.strictEqual(err, null);
+                expect(err).toBe(null);
                 cutAttributes(data);
                 assert.deepStrictEqual(data, {
                     IsTruncated: false,
@@ -122,14 +122,14 @@ describe('Listing corner cases tests', () => {
             done();
         });
     });
-    it('should list with unexpected marker', done => {
+    test('should list with unexpected marker', done => {
         s3.listObjects(
             { Bucket,
               Delimiter: '/',
               Marker: 'zzzz',
             },
             (err, data) => {
-                assert.strictEqual(err, null);
+                expect(err).toBe(null);
                 assert.deepStrictEqual(data, {
                     IsTruncated: false,
                     Marker: 'zzzz',
@@ -143,7 +143,7 @@ describe('Listing corner cases tests', () => {
             done();
         });
     });
-    it('should list with unexpected marker and prefix', done => {
+    test('should list with unexpected marker and prefix', done => {
         s3.listObjects(
             { Bucket,
               Delimiter: '/',
@@ -151,7 +151,7 @@ describe('Listing corner cases tests', () => {
               Prefix: 'notes/summer/',
             },
             (err, data) => {
-                assert.strictEqual(err, null);
+                expect(err).toBe(null);
                 assert.deepStrictEqual(data, {
                     IsTruncated: false,
                     Marker: 'notes/summer0',
@@ -165,13 +165,13 @@ describe('Listing corner cases tests', () => {
             done();
         });
     });
-    it('should list with MaxKeys', done => {
+    test('should list with MaxKeys', done => {
         s3.listObjects(
             { Bucket,
               MaxKeys: 3,
             },
             (err, data) => {
-                assert.strictEqual(err, null);
+                expect(err).toBe(null);
                 cutAttributes(data);
                 assert.deepStrictEqual(data, {
                     Marker: '',
@@ -188,13 +188,13 @@ describe('Listing corner cases tests', () => {
             done();
         });
     });
-    it('should list with big MaxKeys', done => {
+    test('should list with big MaxKeys', done => {
         s3.listObjects(
             { Bucket,
               MaxKeys: 15000,
             },
             (err, data) => {
-                assert.strictEqual(err, null);
+                expect(err).toBe(null);
                 cutAttributes(data);
                 assert.deepStrictEqual(data, {
                     Marker: '',
@@ -218,13 +218,13 @@ describe('Listing corner cases tests', () => {
             done();
         });
     });
-    it('should list with delimiter', done => {
+    test('should list with delimiter', done => {
         s3.listObjects(
             { Bucket,
               Delimiter: '/',
             },
             (err, data) => {
-                assert.strictEqual(err, null);
+                expect(err).toBe(null);
                 cutAttributes(data);
                 assert.deepStrictEqual(data, {
                     Marker: '',
@@ -239,13 +239,13 @@ describe('Listing corner cases tests', () => {
             done();
         });
     });
-    it('should list with long delimiter', done => {
+    test('should list with long delimiter', done => {
         s3.listObjects(
             { Bucket,
               Delimiter: 'notes/summer',
             },
             (err, data) => {
-                assert.strictEqual(err, null);
+                expect(err).toBe(null);
                 cutAttributes(data);
                 assert.deepStrictEqual(data, {
                     Marker: '',
@@ -267,14 +267,14 @@ describe('Listing corner cases tests', () => {
             done();
         });
     });
-    it('should list with delimiter and prefix related to #147', done => {
+    test('should list with delimiter and prefix related to #147', done => {
         s3.listObjects(
             { Bucket,
               Delimiter: '/',
               Prefix: 'notes/',
             },
             (err, data) => {
-                assert.strictEqual(err, null);
+                expect(err).toBe(null);
                 cutAttributes(data);
                 assert.deepStrictEqual(data, {
                     Marker: '',
@@ -296,7 +296,7 @@ describe('Listing corner cases tests', () => {
             done();
         });
     });
-    it('should list with prefix and marker related to #147', done => {
+    test('should list with prefix and marker related to #147', done => {
         s3.listObjects(
             { Bucket,
               Delimiter: '/',
@@ -304,7 +304,7 @@ describe('Listing corner cases tests', () => {
               Marker: 'notes/year.txt',
             },
             (err, data) => {
-                assert.strictEqual(err, null);
+                expect(err).toBe(null);
                 cutAttributes(data);
                 assert.deepStrictEqual(data, {
                     Marker: 'notes/year.txt',
@@ -319,7 +319,7 @@ describe('Listing corner cases tests', () => {
             done();
         });
     });
-    it('should list with all parameters 1 of 5', done => {
+    test('should list with all parameters 1 of 5', done => {
         s3.listObjects(
             { Bucket,
               Delimiter: '/',
@@ -328,7 +328,7 @@ describe('Listing corner cases tests', () => {
               MaxKeys: 1,
             },
             (err, data) => {
-                assert.strictEqual(err, null);
+                expect(err).toBe(null);
                 cutAttributes(data);
                 assert.deepStrictEqual(data, {
                     Marker: 'notes/',
@@ -344,7 +344,7 @@ describe('Listing corner cases tests', () => {
             done();
         });
     });
-    it('should list with all parameters 2 of 5', done => {
+    test('should list with all parameters 2 of 5', done => {
         s3.listObjects(
             { Bucket,
               Delimiter: '/',
@@ -353,7 +353,7 @@ describe('Listing corner cases tests', () => {
               MaxKeys: 1,
             },
             (err, data) => {
-                assert.strictEqual(err, null);
+                expect(err).toBe(null);
                 cutAttributes(data);
                 assert.deepStrictEqual(data, {
                     Marker: 'notes/spring/',
@@ -369,7 +369,7 @@ describe('Listing corner cases tests', () => {
             done();
         });
     });
-    it('should list with all parameters 3 of 5', done => {
+    test('should list with all parameters 3 of 5', done => {
         s3.listObjects(
             { Bucket,
               Delimiter: '/',
@@ -378,7 +378,7 @@ describe('Listing corner cases tests', () => {
               MaxKeys: 1,
             },
             (err, data) => {
-                assert.strictEqual(err, null);
+                expect(err).toBe(null);
                 cutAttributes(data);
                 assert.deepStrictEqual(data, {
                     Marker: 'notes/summer/',
@@ -394,7 +394,7 @@ describe('Listing corner cases tests', () => {
             done();
         });
     });
-    it('should list with all parameters 4 of 5', done => {
+    test('should list with all parameters 4 of 5', done => {
         s3.listObjects(
             { Bucket,
               Delimiter: '/',
@@ -403,7 +403,7 @@ describe('Listing corner cases tests', () => {
               MaxKeys: 1,
             },
             (err, data) => {
-                assert.strictEqual(err, null);
+                expect(err).toBe(null);
                 cutAttributes(data);
                 assert.deepStrictEqual(data, {
                     Marker: 'notes/year.txt',
@@ -419,7 +419,7 @@ describe('Listing corner cases tests', () => {
             done();
         });
     });
-    it('should list with all parameters 5 of 5', done => {
+    test('should list with all parameters 5 of 5', done => {
         s3.listObjects(
             { Bucket,
               Delimiter: '/',
@@ -428,7 +428,7 @@ describe('Listing corner cases tests', () => {
               MaxKeys: 1,
             },
             (err, data) => {
-                assert.strictEqual(err, null);
+                expect(err).toBe(null);
                 cutAttributes(data);
                 assert.deepStrictEqual(data, {
                     Marker: 'notes/yore.rs',
@@ -443,7 +443,7 @@ describe('Listing corner cases tests', () => {
             done();
         });
     });
-    it('should ends listing on last common prefix', done => {
+    test('should ends listing on last common prefix', done => {
         s3.putObject({
             Bucket,
             Key: 'notes/zaphod/TheFourth.txt',
@@ -458,7 +458,7 @@ describe('Listing corner cases tests', () => {
                     MaxKeys: 1,
                 },
                 (err, data) => {
-                    assert.strictEqual(err, null);
+                    expect(err).toBe(null);
                     cutAttributes(data);
                     assert.deepStrictEqual(data, {
                         IsTruncated: false,
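
Note that these listing tests keep `assert.deepStrictEqual` for the whole-response comparisons and only convert the scalar checks. If one wanted to finish the job, Jest's closest equivalent is `toStrictEqual` (recursive and type-sensitive, like `deepStrictEqual`), while `toEqual` is slightly looser (it ignores `undefined` properties). Sketch:

    const assert = require('assert');

    test('deep equality: assert vs expect (sketch)', () => {
        const data = { IsTruncated: false, Contents: [] };
        assert.deepStrictEqual(data, { IsTruncated: false, Contents: [] });
        expect(data).toStrictEqual({ IsTruncated: false, Contents: [] });
    });
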
@@ -24,14 +24,14 @@ describe('PUT Bucket - AWS.S3.createBucket', () => {
         s3 = new S3(config);
     });

-    it('should return 403 and AccessDenied', done => {
+    test('should return 403 and AccessDenied', done => {
         const params = { Bucket: 'mybucket' };

         s3.makeUnauthenticatedRequest('createBucket', params, error => {
-            assert(error);
+            expect(error).toBeTruthy();

-            assert.strictEqual(error.statusCode, 403);
-            assert.strictEqual(error.code, 'AccessDenied');
+            expect(error.statusCode).toBe(403);
+            expect(error.code).toBe('AccessDenied');

             done();
         });
@@ -41,7 +41,7 @@ describe('PUT Bucket - AWS.S3.createBucket', () => {
     withV4(sigCfg => {
         let bucketUtil;

-        before(() => {
+        beforeAll(() => {
             bucketUtil = new BucketUtility('default', sigCfg);
         });

@@ -62,7 +62,7 @@ describe('PUT Bucket - AWS.S3.createBucket', () => {
                'provided.', done => {
                 bucketUtil.s3.createBucket({ Bucket: bucketName }, done);
             });
-            it('should return a 200 if us-east behavior', done => {
+            test('should return a 200 if us-east behavior', done => {
                 bucketUtil.s3.createBucket({
                     Bucket: bucketName,
                     CreateBucketConfiguration: {
@@ -70,17 +70,16 @@ describe('PUT Bucket - AWS.S3.createBucket', () => {
                     },
                 }, done);
             });
-            it('should return a 409 if us-west behavior', done => {
+            test('should return a 409 if us-west behavior', done => {
                 bucketUtil.s3.createBucket({
                     Bucket: bucketName,
                     CreateBucketConfiguration: {
                         LocationConstraint: 'scality-us-west-1',
                     },
                 }, error => {
-                    assert.notEqual(error, null,
-                        'Expected failure but got success');
-                    assert.strictEqual(error.code, 'BucketAlreadyOwnedByYou');
-                    assert.strictEqual(error.statusCode, 409);
+                    expect(error).not.toEqual(null);
+                    expect(error.code).toBe('BucketAlreadyOwnedByYou');
+                    expect(error.statusCode).toBe(409);
                     done();
                 });
             });
@@ -89,7 +88,7 @@ describe('PUT Bucket - AWS.S3.createBucket', () => {
         describe('bucket naming restriction', () => {
             let testFn;

-            before(() => {
+            beforeAll(() => {
                 testFn = (bucketName, done, errStatus, errCode) => {
                     const expectedStatus = errStatus || 400;
                     const expectedCode = errCode || 'InvalidBucketName';
@@ -102,9 +101,8 @@ describe('PUT Bucket - AWS.S3.createBucket', () => {
                         return done(e);
                     })
                     .catch(error => {
-                        assert.strictEqual(error.code, expectedCode);
-                        assert.strictEqual(error.statusCode,
-                            expectedStatus);
+                        expect(error.code).toBe(expectedCode);
+                        expect(error.statusCode).toBe(expectedStatus);
                         done();
                     });
                 };
@@ -115,20 +113,19 @@ describe('PUT Bucket - AWS.S3.createBucket', () => {
             // Hence it skips some of test suites.
             const itSkipIfAWS = process.env.AWS_ON_AIR ? it.skip : it;

-            it('should return 405 if empty name', done => {
+            test('should return 405 if empty name', done => {
                 const shortName = '';

                 testFn(shortName, done, 405, 'MethodNotAllowed');
             });

-            it('should return 400 if name is shorter than 3 chars', done => {
+            test('should return 400 if name is shorter than 3 chars', done => {
                 const shortName = 'as';

                 testFn(shortName, done);
             });

-            it('should return 403 if name is reserved (e.g., METADATA)',
-                done => {
+            test('should return 403 if name is reserved (e.g., METADATA)', done => {
                 const reservedName = 'METADATA';
                 testFn(reservedName, done, 403, 'AccessDenied');
             });
@@ -154,7 +151,7 @@ describe('PUT Bucket - AWS.S3.createBucket', () => {
                 }
             );

-            it('should return 400 if name ends with period', done => {
+            test('should return 400 if name ends with period', done => {
                 const invalidName = 'myawsbucket.';
                 testFn(invalidName, done);
             });
@@ -167,7 +164,7 @@ describe('PUT Bucket - AWS.S3.createBucket', () => {
                 }
             );

-            it('should return 400 if name has special chars', done => {
+            test('should return 400 if name has special chars', done => {
                 const invalidName = 'my.#s3bucket';
                 testFn(invalidName, done);
             });
@@ -177,27 +174,31 @@ describe('PUT Bucket - AWS.S3.createBucket', () => {
             function _test(name, done) {
                 bucketUtil.s3.createBucket({ Bucket: name }, (err, res) => {
                     assert.ifError(err);
-                    assert(res.Location, 'No Location in response');
+                    expect(res.Location).toBeTruthy();
                     assert.deepStrictEqual(res.Location, `/${name}`,
                         'Wrong Location header');
                     bucketUtil.deleteOne(name).then(() => done()).catch(done);
                 });
             }
-            it('should create bucket if name is valid', done =>
+            test('should create bucket if name is valid', done =>
                 _test('scality-very-valid-bucket-name', done));

-            it('should create bucket if name is some prefix and an IP address',
-                done => _test('prefix-192.168.5.4', done));
+            test(
+                'should create bucket if name is some prefix and an IP address',
+                done => _test('prefix-192.168.5.4', done)
+            );

-            it('should create bucket if name is an IP address with some suffix',
-                done => _test('192.168.5.4-suffix', done));
+            test(
+                'should create bucket if name is an IP address with some suffix',
+                done => _test('192.168.5.4-suffix', done)
+            );
        });
        Object.keys(locationConstraints).forEach(
        location => {
            describeSkipAWS(`bucket creation with location: ${location}`,
            () => {
-                after(() => bucketUtil.deleteOne(bucketName));
+                afterAll(() => bucketUtil.deleteOne(bucketName));
-                it(`should create bucket with location: ${location}`, done => {
+                test(`should create bucket with location: ${location}`, done => {
                    bucketUtil.s3.createBucketAsync(
                        {
                            Bucket: bucketName,
@@ -210,7 +211,7 @@ describe('PUT Bucket - AWS.S3.createBucket', () => {
             });

         describe('bucket creation with invalid location', () => {
-            it('should return errors InvalidLocationConstraint', done => {
+            test('should return errors InvalidLocationConstraint', done => {
                 bucketUtil.s3.createBucketAsync(
                     {
                         Bucket: bucketName,
@@ -218,18 +219,17 @@ describe('PUT Bucket - AWS.S3.createBucket', () => {
                         LocationConstraint: 'coco',
                     },
                 }, err => {
-                    assert.strictEqual(err.code,
-                        'InvalidLocationConstraint');
-                    assert.strictEqual(err.statusCode, 400);
+                    expect(err.code).toBe('InvalidLocationConstraint');
+                    expect(err.statusCode).toBe(400);
                     done();
                 });
             });
         });

         describe('bucket creation with ingestion location', () => {
-            after(done =>
+            afterAll(done =>
                 bucketUtil.s3.deleteBucket({ Bucket: bucketName }, done));
-            it('should create bucket with location and ingestion', done => {
+            test('should create bucket with location and ingestion', done => {
                 async.waterfall([
                     next => bucketUtil.s3.createBucketAsync(
                         {
@@ -239,7 +239,7 @@ describe('PUT Bucket - AWS.S3.createBucket', () => {
                         },
                     }, (err, res) => {
                         assert.ifError(err);
-                        assert.strictEqual(res.Location, `/${bucketName}`);
+                        expect(res.Location).toBe(`/${bucketName}`);
                         return next();
                     }),
                     next => bucketUtil.s3.getBucketLocation(
@@ -247,13 +247,13 @@ describe('PUT Bucket - AWS.S3.createBucket', () => {
                         Bucket: bucketName,
                     }, (err, res) => {
                         assert.ifError(err);
-                        assert.strictEqual(res.LocationConstraint, 'us-east-2');
+                        expect(res.LocationConstraint).toBe('us-east-2');
                         return next();
                     }),
                     next => bucketUtil.s3.getBucketVersioning(
                         { Bucket: bucketName }, (err, res) => {
                             assert.ifError(err);
-                            assert.strictEqual(res.Status, 'Enabled');
+                            expect(res.Status).toBe('Enabled');
                             return next();
                         }),
                 ], done);
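
The `itSkipIfAWS` helper in these files can stay on `it`/`it.skip` even after the rename, because Jest exposes `it` as an alias of `test` (and `it.skip` of `test.skip`). Sketch:

    // Unchanged pattern from the diff; works under Jest because `it`
    // aliases `test`.
    const itSkipIfAWS = process.env.AWS_ON_AIR ? it.skip : it;

    itSkipIfAWS('runs only when AWS_ON_AIR is not set', () => {
        expect(process.env.AWS_ON_AIR).toBeUndefined();
    });
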
@@ -25,14 +25,14 @@ describe('aws-node-sdk test bucket put acl', () => {
     let s3;

     // setup test
-    before(done => {
+    beforeAll(done => {
         const config = getConfig('default', { signatureVersion: 'v4' });
         s3 = new S3(config);
         s3.createBucket({ Bucket: bucket }, done);
     });

     // delete bucket after testing
-    after(done => s3.deleteBucket({ Bucket: bucket }, done));
+    afterAll(done => s3.deleteBucket({ Bucket: bucket }, done));

     const itSkipIfAWS = process.env.AWS_ON_AIR ? it.skip : it;
     itSkipIfAWS('should not accept xml body larger than 512 KB', done => {
@@ -48,9 +48,8 @@ describe('aws-node-sdk test bucket put acl', () => {
         };
         s3.putBucketAcl(params, error => {
             if (error) {
-                assert.strictEqual(error.statusCode, 400);
-                assert.strictEqual(
-                    error.code, 'InvalidRequest');
+                expect(error.statusCode).toBe(400);
+                expect(error.code).toBe('InvalidRequest');
                 done();
             } else {
                 done('accepted xml body larger than 512 KB');
@@ -80,18 +79,18 @@ describe('PUT Bucket ACL', () => {
         });
     });

-    it('should return InvalidArgument if invalid grantee ' +
+    test('should return InvalidArgument if invalid grantee ' +
         'user ID provided in ACL header request', done => {
         s3.putBucketAcl({
             Bucket: bucketName,
             GrantRead: 'id=invalidUserID' }, err => {
-            assert.strictEqual(err.statusCode, 400);
-            assert.strictEqual(err.code, 'InvalidArgument');
+            expect(err.statusCode).toBe(400);
+            expect(err.code).toBe('InvalidArgument');
             done();
         });
     });

-    it('should return InvalidArgument if invalid grantee ' +
+    test('should return InvalidArgument if invalid grantee ' +
         'user ID provided in ACL request body', done => {
         s3.putBucketAcl({
             Bucket: bucketName,
@@ -111,8 +110,8 @@ describe('PUT Bucket ACL', () => {
                 },
             },
         }, err => {
-            assert.strictEqual(err.statusCode, 400);
-            assert.strictEqual(err.code, 'InvalidArgument');
+            expect(err.statusCode).toBe(400);
+            expect(err.code).toBe('InvalidArgument');
             done();
         });
     });
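
The expected-failure branch above (`done('accepted xml body larger than 512 KB')`) behaves the same under Jest: calling `done` with a truthy argument fails the test. A self-contained sketch, with a hypothetical `send` standing in for `s3.putBucketAcl`:

    // send always errors here, so the happy path below is never taken.
    const send = cb =>
        setImmediate(() => cb({ statusCode: 400, code: 'InvalidRequest' }));

    test('failing the else-branch via done(message) (sketch)', done => {
        send(error => {
            if (error) {
                expect(error.statusCode).toBe(400);
                expect(error.code).toBe('InvalidRequest');
                done();
            } else {
                done('request unexpectedly succeeded'); // fails the test
            }
        });
    });
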
@ -20,13 +20,10 @@ const expirationRule = {
|
||||||
// Check for the expected error response code and status code.
|
// Check for the expected error response code and status code.
|
||||||
function assertError(err, expectedErr, cb) {
|
function assertError(err, expectedErr, cb) {
|
||||||
if (expectedErr === null) {
|
if (expectedErr === null) {
|
||||||
assert.strictEqual(err, null, `expected no error but got '${err}'`);
|
expect(err).toBe(null);
|
||||||
} else {
|
} else {
|
||||||
assert.strictEqual(err.code, expectedErr, 'incorrect error response ' +
|
expect(err.code).toBe(expectedErr);
|
||||||
`code: should be '${expectedErr}' but got '${err.code}'`);
|
expect(err.statusCode).toBe(errors[expectedErr].code);
|
||||||
assert.strictEqual(err.statusCode, errors[expectedErr].code,
|
|
||||||
'incorrect error status code: should be ' +
|
|
||||||
`${errors[expectedErr].code}, but got '${err.statusCode}'`);
|
|
||||||
}
|
}
|
||||||
cb();
|
cb();
|
||||||
}
|
}
|
||||||
|
@ -50,14 +47,14 @@ describe('aws-sdk test put bucket lifecycle', () => {
|
||||||
let s3;
|
let s3;
|
||||||
let otherAccountS3;
|
let otherAccountS3;
|
||||||
|
|
||||||
before(done => {
|
beforeAll(done => {
|
||||||
const config = getConfig('default', { signatureVersion: 'v4' });
|
const config = getConfig('default', { signatureVersion: 'v4' });
|
||||||
s3 = new S3(config);
|
s3 = new S3(config);
|
||||||
otherAccountS3 = new BucketUtility('lisa', {}).s3;
|
otherAccountS3 = new BucketUtility('lisa', {}).s3;
|
||||||
return done();
|
return done();
|
||||||
});
|
});
|
||||||
|
|
||||||
it('should return NoSuchBucket error if bucket does not exist', done => {
|
test('should return NoSuchBucket error if bucket does not exist', done => {
|
||||||
const params = getLifecycleParams();
|
const params = getLifecycleParams();
|
||||||
s3.putBucketLifecycleConfiguration(params, err =>
|
s3.putBucketLifecycleConfiguration(params, err =>
|
||||||
assertError(err, 'NoSuchBucket', done));
|
assertError(err, 'NoSuchBucket', done));
|
||||||
|
@ -68,69 +65,69 @@ describe('aws-sdk test put bucket lifecycle', () => {
|
||||||
|
|
||||||
afterEach(done => s3.deleteBucket({ Bucket: bucket }, done));
|
afterEach(done => s3.deleteBucket({ Bucket: bucket }, done));
|
||||||
|
|
||||||
it('should return AccessDenied if user is not bucket owner', done => {
|
test('should return AccessDenied if user is not bucket owner', done => {
|
||||||
const params = getLifecycleParams();
|
const params = getLifecycleParams();
|
||||||
otherAccountS3.putBucketLifecycleConfiguration(params,
|
otherAccountS3.putBucketLifecycleConfiguration(params,
|
||||||
err => assertError(err, 'AccessDenied', done));
|
err => assertError(err, 'AccessDenied', done));
|
||||||
});
|
});
|
||||||
|
|
||||||
it('should put lifecycle configuration on bucket', done => {
|
test('should put lifecycle configuration on bucket', done => {
|
||||||
const params = getLifecycleParams();
|
const params = getLifecycleParams();
|
||||||
s3.putBucketLifecycleConfiguration(params, err =>
|
s3.putBucketLifecycleConfiguration(params, err =>
|
||||||
assertError(err, null, done));
|
assertError(err, null, done));
|
||||||
});
|
});
|
||||||
|
|
||||||
it('should not allow lifecycle config with no Status', done => {
|
test('should not allow lifecycle config with no Status', done => {
|
||||||
const params = getLifecycleParams({ key: 'Status', value: '' });
|
const params = getLifecycleParams({ key: 'Status', value: '' });
|
||||||
s3.putBucketLifecycleConfiguration(params, err =>
|
s3.putBucketLifecycleConfiguration(params, err =>
|
||||||
assertError(err, 'MalformedXML', done));
|
assertError(err, 'MalformedXML', done));
|
||||||
});
|
});
|
||||||
|
|
||||||
it('should not allow lifecycle config with no Prefix or Filter',
|
test('should not allow lifecycle config with no Prefix or Filter', done => {
|
||||||
done => {
|
|
||||||
const params = getLifecycleParams({ key: 'Prefix', value: null });
|
const params = getLifecycleParams({ key: 'Prefix', value: null });
|
||||||
s3.putBucketLifecycleConfiguration(params, err =>
|
s3.putBucketLifecycleConfiguration(params, err =>
|
||||||
assertError(err, 'MalformedXML', done));
|
assertError(err, 'MalformedXML', done));
|
||||||
});
|
});
|
||||||
|
|
||||||
it('should not allow lifecycle config with empty action', done => {
|
test('should not allow lifecycle config with empty action', done => {
|
||||||
const params = getLifecycleParams({ key: 'Expiration', value: {} });
|
const params = getLifecycleParams({ key: 'Expiration', value: {} });
|
||||||
s3.putBucketLifecycleConfiguration(params, err =>
|
s3.putBucketLifecycleConfiguration(params, err =>
|
||||||
assertError(err, 'MalformedXML', done));
|
assertError(err, 'MalformedXML', done));
|
||||||
});
|
});
|
||||||
|
|
||||||
it('should not allow lifecycle config with ID longer than 255 char',
|
test(
|
||||||
|
'should not allow lifecycle config with ID longer than 255 char',
|
||||||
done => {
|
done => {
|
||||||
const params =
|
const params =
|
||||||
getLifecycleParams({ key: 'ID', value: 'a'.repeat(256) });
|
getLifecycleParams({ key: 'ID', value: 'a'.repeat(256) });
|
||||||
s3.putBucketLifecycleConfiguration(params, err =>
|
s3.putBucketLifecycleConfiguration(params, err =>
|
||||||
assertError(err, 'InvalidArgument', done));
|
assertError(err, 'InvalidArgument', done));
|
||||||
});
|
}
|
||||||
|
);
|
||||||
|
|
||||||
it('should allow lifecycle config with Prefix length < 1024', done => {
|
test('should allow lifecycle config with Prefix length < 1024', done => {
|
||||||
const params =
|
const params =
|
||||||
getLifecycleParams({ key: 'Prefix', value: 'a'.repeat(1023) });
|
getLifecycleParams({ key: 'Prefix', value: 'a'.repeat(1023) });
|
||||||
s3.putBucketLifecycleConfiguration(params, err =>
|
s3.putBucketLifecycleConfiguration(params, err =>
|
||||||
assertError(err, null, done));
|
assertError(err, null, done));
|
||||||
});
|
});
|
||||||
|
|
||||||
-    it('should allow lifecycle config with Prefix length === 1024',
-        done => {
+    test('should allow lifecycle config with Prefix length === 1024', done => {
        const params =
            getLifecycleParams({ key: 'Prefix', value: 'a'.repeat(1024) });
        s3.putBucketLifecycleConfiguration(params, err =>
            assertError(err, null, done));
    });

-    it('should not allow lifecycle config with Prefix length > 1024',
-        done => {
+    test('should not allow lifecycle config with Prefix length > 1024', done => {
        const params =
            getLifecycleParams({ key: 'Prefix', value: 'a'.repeat(1025) });
        s3.putBucketLifecycleConfiguration(params, err =>
            assertError(err, 'InvalidRequest', done));
    });

-    it('should not allow lifecycle config with Filter.Prefix length > 1024',
+    test(
+        'should not allow lifecycle config with Filter.Prefix length > 1024',
        done => {
        const params = getLifecycleParams({
            key: 'Filter',

@@ -139,9 +136,10 @@ describe('aws-sdk test put bucket lifecycle', () => {
        delete params.LifecycleConfiguration.Rules[0].Prefix;
        s3.putBucketLifecycleConfiguration(params, err =>
            assertError(err, 'InvalidRequest', done));
-    });
+        }
+    );

-    it('should not allow lifecycle config with Filter.And.Prefix length ' +
+    test('should not allow lifecycle config with Filter.And.Prefix length ' +
        '> 1024', done => {
        const params = getLifecycleParams({
            key: 'Filter',

@@ -157,7 +155,7 @@ describe('aws-sdk test put bucket lifecycle', () => {
            assertError(err, 'InvalidRequest', done));
    });

-    it('should allow lifecycle config with Tag.Key length < 128', done => {
+    test('should allow lifecycle config with Tag.Key length < 128', done => {
        const params = getLifecycleParams({
            key: 'Filter',
            value: { Tag: { Key: 'a'.repeat(127), Value: 'bar' } },

@@ -167,8 +165,7 @@ describe('aws-sdk test put bucket lifecycle', () => {
            assertError(err, null, done));
    });

-    it('should allow lifecycle config with Tag.Key length === 128',
-        done => {
+    test('should allow lifecycle config with Tag.Key length === 128', done => {
        const params = getLifecycleParams({
            key: 'Filter',
            value: { Tag: { Key: 'a'.repeat(128), Value: 'bar' } },

@@ -178,8 +175,7 @@ describe('aws-sdk test put bucket lifecycle', () => {
            assertError(err, null, done));
    });

-    it('should not allow lifecycle config with Tag.Key length > 128',
-        done => {
+    test('should not allow lifecycle config with Tag.Key length > 128', done => {
        const params = getLifecycleParams({
            key: 'Filter',
            value: { Tag: { Key: 'a'.repeat(129), Value: 'bar' } },

@@ -189,8 +185,7 @@ describe('aws-sdk test put bucket lifecycle', () => {
            assertError(err, 'InvalidRequest', done));
    });

-    it('should allow lifecycle config with Tag.Value length < 256',
-        done => {
+    test('should allow lifecycle config with Tag.Value length < 256', done => {
        const params = getLifecycleParams({
            key: 'Filter',
            value: { Tag: { Key: 'a', Value: 'b'.repeat(255) } },

@@ -200,8 +195,7 @@ describe('aws-sdk test put bucket lifecycle', () => {
            assertError(err, null, done));
    });

-    it('should allow lifecycle config with Tag.Value length === 256',
-        done => {
+    test('should allow lifecycle config with Tag.Value length === 256', done => {
        const params = getLifecycleParams({
            key: 'Filter',
            value: { Tag: { Key: 'a', Value: 'b'.repeat(256) } },

@@ -211,8 +205,7 @@ describe('aws-sdk test put bucket lifecycle', () => {
            assertError(err, null, done));
    });

-    it('should not allow lifecycle config with Tag.Value length > 256',
-        done => {
+    test('should not allow lifecycle config with Tag.Value length > 256', done => {
        const params = getLifecycleParams({
            key: 'Filter',
            value: { Tag: { Key: 'a', Value: 'b'.repeat(257) } },

@@ -222,20 +215,20 @@ describe('aws-sdk test put bucket lifecycle', () => {
            assertError(err, 'InvalidRequest', done));
    });

-    it('should not allow lifecycle config with Prefix and Filter', done => {
+    test('should not allow lifecycle config with Prefix and Filter', done => {
        const params = getLifecycleParams(
            { key: 'Filter', value: { Prefix: 'foo' } });
        s3.putBucketLifecycleConfiguration(params, err =>
            assertError(err, 'MalformedXML', done));
    });

-    it('should allow lifecycle config without ID', done => {
+    test('should allow lifecycle config without ID', done => {
        const params = getLifecycleParams({ key: 'ID', value: '' });
        s3.putBucketLifecycleConfiguration(params, err =>
            assertError(err, null, done));
    });

-    it('should allow lifecycle config with multiple actions', done => {
+    test('should allow lifecycle config with multiple actions', done => {
        const params = getLifecycleParams({
            key: 'NoncurrentVersionExpiration',
            value: { NoncurrentDays: 1 },

@@ -246,25 +239,25 @@ describe('aws-sdk test put bucket lifecycle', () => {

    describe('with Rule.Filter not Rule.Prefix', () => {
-        before(done => {
+        beforeAll(done => {
            expirationRule.Prefix = null;
            done();
        });

-        it('should allow config with empty Filter', done => {
+        test('should allow config with empty Filter', done => {
            const params = getLifecycleParams({ key: 'Filter', value: {} });
            s3.putBucketLifecycleConfiguration(params, err =>
                assertError(err, null, done));
        });

-        it('should not allow config with And & Prefix', done => {
+        test('should not allow config with And & Prefix', done => {
            const params = getLifecycleParams(
                { key: 'Filter', value: { Prefix: 'foo', And: {} } });
            s3.putBucketLifecycleConfiguration(params, err =>
                assertError(err, 'MalformedXML', done));
        });

-        it('should not allow config with And & Tag', done => {
+        test('should not allow config with And & Tag', done => {
            const params = getLifecycleParams({
                key: 'Filter',
                value: { Tag: { Key: 'foo', Value: 'bar' }, And: {} },

@@ -273,7 +266,7 @@ describe('aws-sdk test put bucket lifecycle', () => {
                assertError(err, 'MalformedXML', done));
        });

-        it('should not allow config with Prefix & Tag', done => {
+        test('should not allow config with Prefix & Tag', done => {
            const params = getLifecycleParams({
                key: 'Filter',
                value: { Tag: { Key: 'foo', Value: 'bar' }, Prefix: 'foo' },

@@ -282,14 +275,14 @@ describe('aws-sdk test put bucket lifecycle', () => {
                assertError(err, 'MalformedXML', done));
        });

-        it('should allow config with only Prefix', done => {
+        test('should allow config with only Prefix', done => {
            const params = getLifecycleParams(
                { key: 'Filter', value: { Prefix: 'foo' } });
            s3.putBucketLifecycleConfiguration(params, err =>
                assertError(err, null, done));
        });

-        it('should allow config with only Tag', done => {
+        test('should allow config with only Tag', done => {
            const params = getLifecycleParams({
                key: 'Filter',
                value: { Tag: { Key: 'foo', Value: 'ba' } },

@@ -298,15 +291,14 @@ describe('aws-sdk test put bucket lifecycle', () => {
                assertError(err, null, done));
        });

-        it('should not allow config with And.Prefix & no And.Tags',
-            done => {
+        test('should not allow config with And.Prefix & no And.Tags', done => {
            const params = getLifecycleParams(
                { key: 'Filter', value: { And: { Prefix: 'foo' } } });
            s3.putBucketLifecycleConfiguration(params, err =>
                assertError(err, 'MalformedXML', done));
        });

-        it('should not allow config with only one And.Tags', done => {
+        test('should not allow config with only one And.Tags', done => {
            const params = getLifecycleParams({
                key: 'Filter',
                value: { And: { Tags: [{ Key: 'f', Value: 'b' }] } },

@@ -315,8 +307,7 @@ describe('aws-sdk test put bucket lifecycle', () => {
                assertError(err, 'MalformedXML', done));
        });

-        it('should allow config with And.Tags & no And.Prefix',
-            done => {
+        test('should allow config with And.Tags & no And.Prefix', done => {
            const params = getLifecycleParams({
                key: 'Filter',
                value: { And: { Tags:

@@ -328,7 +319,7 @@ describe('aws-sdk test put bucket lifecycle', () => {
                assertError(err, null, done));
        });

-        it('should allow config with And.Prefix & And.Tags', done => {
+        test('should allow config with And.Prefix & And.Tags', done => {
            const params = getLifecycleParams({
                key: 'Filter',
                value: { And: { Prefix: 'foo',

@@ -358,7 +349,7 @@ describe('aws-sdk test put bucket lifecycle', () => {
            };
        }

-        it('should allow NoncurrentDays and StorageClass', done => {
+        test('should allow NoncurrentDays and StorageClass', done => {
            const noncurrentVersionTransitions = [{
                NoncurrentDays: 0,
                StorageClass: 'us-east-2',

@@ -370,7 +361,7 @@ describe('aws-sdk test put bucket lifecycle', () => {
            });
        });

-        it('should not allow duplicate StorageClass', done => {
+        test('should not allow duplicate StorageClass', done => {
            const noncurrentVersionTransitions = [{
                NoncurrentDays: 1,
                StorageClass: 'us-east-2',

@@ -380,77 +371,70 @@ describe('aws-sdk test put bucket lifecycle', () => {
            }];
            const params = getParams(noncurrentVersionTransitions);
            s3.putBucketLifecycleConfiguration(params, err => {
-                assert.strictEqual(err.code, 'InvalidRequest');
-                assert.strictEqual(err.message,
-                    "'StorageClass' must be different for " +
+                expect(err.code).toBe('InvalidRequest');
+                expect(err.message).toBe("'StorageClass' must be different for " +
                    "'NoncurrentVersionTransition' actions in same " +
                    "'Rule' with prefix ''");
                done();
            });
        });

-        it('should not allow unknown StorageClass',
-            done => {
+        test('should not allow unknown StorageClass', done => {
            const noncurrentVersionTransitions = [{
                NoncurrentDays: 1,
                StorageClass: 'unknown',
            }];
            const params = getParams(noncurrentVersionTransitions);
            s3.putBucketLifecycleConfiguration(params, err => {
-                assert.strictEqual(err.code, 'MalformedXML');
+                expect(err.code).toBe('MalformedXML');
                done();
            });
        });

-        it(`should not allow NoncurrentDays value exceeding ${MAX_DAYS}`,
-            done => {
+        test(`should not allow NoncurrentDays value exceeding ${MAX_DAYS}`, done => {
            const noncurrentVersionTransitions = [{
                NoncurrentDays: MAX_DAYS + 1,
                StorageClass: 'us-east-2',
            }];
            const params = getParams(noncurrentVersionTransitions);
            s3.putBucketLifecycleConfiguration(params, err => {
-                assert.strictEqual(err.code, 'MalformedXML');
+                expect(err.code).toBe('MalformedXML');
                done();
            });
        });

-        it('should not allow negative NoncurrentDays',
-            done => {
+        test('should not allow negative NoncurrentDays', done => {
            const noncurrentVersionTransitions = [{
                NoncurrentDays: -1,
                StorageClass: 'us-east-2',
            }];
            const params = getParams(noncurrentVersionTransitions);
            s3.putBucketLifecycleConfiguration(params, err => {
-                assert.strictEqual(err.code, 'InvalidArgument');
-                assert.strictEqual(err.message,
-                    "'NoncurrentDays' in NoncurrentVersionTransition " +
+                expect(err.code).toBe('InvalidArgument');
+                expect(err.message).toBe("'NoncurrentDays' in NoncurrentVersionTransition " +
                    'action must be nonnegative');
                done();
            });
        });

-        it('should not allow config missing NoncurrentDays',
-            done => {
+        test('should not allow config missing NoncurrentDays', done => {
            const noncurrentVersionTransitions = [{
                StorageClass: 'us-east-2',
            }];
            const params = getParams(noncurrentVersionTransitions);
            s3.putBucketLifecycleConfiguration(params, err => {
-                assert.strictEqual(err.code, 'MalformedXML');
+                expect(err.code).toBe('MalformedXML');
                done();
            });
        });

-        it('should not allow config missing StorageClass',
-            done => {
+        test('should not allow config missing StorageClass', done => {
            const noncurrentVersionTransitions = [{
                NoncurrentDays: 1,
            }];
            const params = getParams(noncurrentVersionTransitions);
            s3.putBucketLifecycleConfiguration(params, err => {
-                assert.strictEqual(err.code, 'MalformedXML');
+                expect(err.code).toBe('MalformedXML');
                done();
            });
        });

@@ -471,7 +455,7 @@ describe('aws-sdk test put bucket lifecycle', () => {
            };
        }

-        it('should allow Days', done => {
+        test('should allow Days', done => {
            const transitions = [{
                Days: 0,
                StorageClass: 'us-east-2',

@@ -483,33 +467,32 @@ describe('aws-sdk test put bucket lifecycle', () => {
            });
        });

-        it(`should not allow Days value exceeding ${MAX_DAYS}`, done => {
+        test(`should not allow Days value exceeding ${MAX_DAYS}`, done => {
            const transitions = [{
                Days: MAX_DAYS + 1,
                StorageClass: 'us-east-2',
            }];
            const params = getParams(transitions);
            s3.putBucketLifecycleConfiguration(params, err => {
-                assert.strictEqual(err.code, 'MalformedXML');
+                expect(err.code).toBe('MalformedXML');
                done();
            });
        });

-        it('should not allow negative Days value', done => {
+        test('should not allow negative Days value', done => {
            const transitions = [{
                Days: -1,
                StorageClass: 'us-east-2',
            }];
            const params = getParams(transitions);
            s3.putBucketLifecycleConfiguration(params, err => {
-                assert.strictEqual(err.code, 'InvalidArgument');
-                assert.strictEqual(err.message,
-                    "'Days' in Transition action must be nonnegative");
+                expect(err.code).toBe('InvalidArgument');
+                expect(err.message).toBe("'Days' in Transition action must be nonnegative");
                done();
            });
        });

-        it('should not allow duplicate StorageClass', done => {
+        test('should not allow duplicate StorageClass', done => {
            const transitions = [{
                Days: 1,
                StorageClass: 'us-east-2',

@@ -519,16 +502,15 @@ describe('aws-sdk test put bucket lifecycle', () => {
            }];
            const params = getParams(transitions);
            s3.putBucketLifecycleConfiguration(params, err => {
-                assert.strictEqual(err.code, 'InvalidRequest');
-                assert.strictEqual(err.message,
-                    "'StorageClass' must be different for 'Transition' " +
+                expect(err.code).toBe('InvalidRequest');
+                expect(err.message).toBe("'StorageClass' must be different for 'Transition' " +
                    "actions in same 'Rule' with prefix ''");
                done();
            });
        });

        // TODO: Upgrade to aws-sdk >= 2.60.0 for correct Date field support
-        it.skip('should allow Date', done => {
+        test.skip('should allow Date', done => {
            const transitions = [{
                Date: '2016-01-01T00:00:00.000Z',
                StorageClass: 'us-east-2',

@@ -541,8 +523,7 @@ describe('aws-sdk test put bucket lifecycle', () => {
            });

        // TODO: Upgrade to aws-sdk >= 2.60.0 for correct Date field support
-        it.skip('should not allow speficying both Days and Date value',
-            done => {
+        test.skip('should not allow specifying both Days and Date value', done => {
            const transitions = [{
                Date: '2016-01-01T00:00:00.000Z',
                Days: 1,

@@ -550,13 +531,13 @@ describe('aws-sdk test put bucket lifecycle', () => {
            }];
            const params = getParams(transitions);
            s3.putBucketLifecycleConfiguration(params, err => {
-                assert.strictEqual(err.code, 'MalformedXML');
+                expect(err.code).toBe('MalformedXML');
                done();
            });
        });

        // TODO: Upgrade to aws-sdk >= 2.60.0 for correct Date field support
-        it.skip('should not allow speficying both Days and Date value ' +
+        test.skip('should not allow specifying both Days and Date value ' +
            'across transitions', done => {
            const transitions = [{
                Date: '2016-01-01T00:00:00.000Z',

@@ -567,16 +548,15 @@ describe('aws-sdk test put bucket lifecycle', () => {
            }];
            const params = getParams(transitions);
            s3.putBucketLifecycleConfiguration(params, err => {
-                assert.strictEqual(err.code, 'InvalidRequest');
-                assert.strictEqual(err.message,
-                    "Found mixed 'Date' and 'Days' based Transition " +
+                expect(err.code).toBe('InvalidRequest');
+                expect(err.message).toBe("Found mixed 'Date' and 'Days' based Transition " +
                    "actions in lifecycle rule for prefix ''");
                done();
            });
        });

        // TODO: Upgrade to aws-sdk >= 2.60.0 for correct Date field support
-        it.skip('should not allow speficying both Days and Date value ' +
+        test.skip('should not allow specifying both Days and Date value ' +
            'across transitions and expiration', done => {
            const transitions = [{
                Days: 1,

@@ -585,9 +565,8 @@ describe('aws-sdk test put bucket lifecycle', () => {
            const params = getParams(transitions);
            params.LifecycleConfiguration.Rules[0].Expiration = { Date: 0 };
            s3.putBucketLifecycleConfiguration(params, err => {
-                assert.strictEqual(err.code, 'InvalidRequest');
-                assert.strictEqual(err.message,
-                    "Found mixed 'Date' and 'Days' based Expiration and " +
+                expect(err.code).toBe('InvalidRequest');
+                expect(err.message).toBe("Found mixed 'Date' and 'Days' based Expiration and " +
                    "Transition actions in lifecycle rule for prefix ''");
                done();
            });

@@ -597,7 +576,7 @@ describe('aws-sdk test put bucket lifecycle', () => {
        // NoncurrentVersionTransitions not implemented
        describe.skip('with NoncurrentVersionTransitions and Transitions',
        () => {
-            it('should allow config', done => {
+            test('should allow config', done => {
                const params = {
                    Bucket: bucket,
                    LifecycleConfiguration: {

@@ -623,7 +602,7 @@ describe('aws-sdk test put bucket lifecycle', () => {
                });
            });

-            it('should not allow config when specifying ' +
+            test('should not allow config when specifying ' +
                'NoncurrentVersionTransitions', done => {
                const params = {
                    Bucket: bucket,

@@ -640,8 +619,8 @@ describe('aws-sdk test put bucket lifecycle', () => {
                },
            };
            s3.putBucketLifecycleConfiguration(params, err => {
-                assert.strictEqual(err.statusCode, 501);
-                assert.strictEqual(err.code, 'NotImplemented');
+                expect(err.statusCode).toBe(501);
+                expect(err.code).toBe('NotImplemented');
                done();
            });
        });

@@ -13,13 +13,10 @@ const destinationBucket = 'destination-bucket';
// Check for the expected error response code and status code.
function assertError(err, expectedErr) {
    if (expectedErr === null) {
-        assert.strictEqual(err, null, `expected no error but got '${err}'`);
+        expect(err).toBe(null);
    } else {
-        assert.strictEqual(err.code, expectedErr, 'incorrect error response ' +
-            `code: should be '${expectedErr}' but got '${err.code}'`);
-        assert.strictEqual(err.statusCode, errors[expectedErr].code,
-            'incorrect error status code: should be 400 but got ' +
-            `'${err.statusCode}'`);
+        expect(err.code).toBe(expectedErr);
+        expect(err.statusCode).toBe(errors[expectedErr].code);
    }
}
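
[Editor's note] The rewritten assertError drops the custom failure messages: unlike assert.strictEqual, Jest's expect(...).toBe(...) takes no message argument and reports the expected/received values itself. If the extra context were still wanted, one option is a hypothetical wrapper like the sketch below (assuming the same `errors` map used above; this helper is not part of the change):

    function assertErrorWithContext(err, expectedErr, context) {
        try {
            if (expectedErr === null) {
                expect(err).toBe(null);
            } else {
                expect(err.code).toBe(expectedErr);
                expect(err.statusCode).toBe(errors[expectedErr].code);
            }
        } catch (e) {
            // Re-attach the context string that the assert message used to carry.
            e.message = `${context}: ${e.message}`;
            throw e;
        }
    }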

@@ -83,7 +80,7 @@ describe('aws-node-sdk test putBucketReplication bucket status', () => {
        });
    }

-    before(done => {
+    beforeAll(done => {
        const config = getConfig('default', { signatureVersion: 'v4' });
        s3 = new S3(config);
        otherAccountS3 = new BucketUtility('lisa', {}).s3;

@@ -91,7 +88,7 @@ describe('aws-node-sdk test putBucketReplication bucket status', () => {
        return done();
    });

-    it('should return \'NoSuchBucket\' error if bucket does not exist', done =>
+    test('should return \'NoSuchBucket\' error if bucket does not exist', done =>
        s3.putBucketReplication(replicationParams, err => {
            assertError(err, 'NoSuchBucket');
            return done();

@@ -102,29 +99,29 @@ describe('aws-node-sdk test putBucketReplication bucket status', () => {

        afterEach(done => s3.deleteBucket({ Bucket: sourceBucket }, done));

-        it('should return AccessDenied if user is not bucket owner', done =>
+        test('should return AccessDenied if user is not bucket owner', done =>
            otherAccountS3.putBucketReplication(replicationParams,
                err => {
-                    assert(err);
-                    assert.strictEqual(err.code, 'AccessDenied');
-                    assert.strictEqual(err.statusCode, 403);
+                    expect(err).toBeTruthy();
+                    expect(err.code).toBe('AccessDenied');
+                    expect(err.statusCode).toBe(403);
                    return done();
                }));

-        it('should not put configuration on bucket without versioning', done =>
+        test('should not put configuration on bucket without versioning', done =>
            s3.putBucketReplication(replicationParams, err => {
                assertError(err, 'InvalidRequest');
                return done();
            }));

-        it('should not put configuration on bucket with \'Suspended\'' +
+        test('should not put configuration on bucket with \'Suspended\'' +
            'versioning', done =>
            checkVersioningError(s3, 'Suspended', 'InvalidRequest', done));

-        it('should put configuration on a bucket with versioning', done =>
+        test('should put configuration on a bucket with versioning', done =>
            checkVersioningError(s3, 'Enabled', null, done));

-        it('should put configuration on a bucket with versioning if ' +
+        test('should put configuration on a bucket with versioning if ' +
            'user is a replication user', done =>
            checkVersioningError(replicationAccountS3, 'Enabled', null, done));
    });

@@ -157,15 +154,14 @@ describe('aws-node-sdk test putBucketReplication configuration rules', () => {
        const Role = ARN === '' || ARN === ',' ? ARN : `${ARN},${ARN}`;
        const config = Object.assign({}, replicationConfig, { Role });

-        it('should not accept configuration when \'Role\' is not a ' +
+        test('should not accept configuration when \'Role\' is not a ' +
            'comma-separated list of two valid Amazon Resource Names: ' +
            `'${Role}'`, done =>
            checkError(config, 'InvalidArgument', done));
    });

-    it('should not accept configuration when \'Role\' is a comma-separated ' +
-        'list of more than two valid Amazon Resource Names',
-        done => {
+    test('should not accept configuration when \'Role\' is a comma-separated ' +
+        'list of more than two valid Amazon Resource Names', done => {
        const Role = 'arn:aws:iam::account-id:role/resource-1,' +
            'arn:aws:iam::account-id:role/resource-2,' +
            'arn:aws:iam::account-id:role/resource-3';

@@ -182,13 +178,13 @@ describe('aws-node-sdk test putBucketReplication configuration rules', () => {
        });
        config.Role = ARN;

-        it('should accept configuration if \'Role\' is a single valid ' +
+        test('should accept configuration if \'Role\' is a single valid ' +
            `Amazon Resource Name: '${ARN}', and a rule storageClass defines ` +
            'an external location', done =>
            checkError(config, null, done));
    });

-    it('should allow a combination of storageClasses across rules', done => {
+    test('should allow a combination of storageClasses across rules', done => {
        const config = setConfigRules([replicationConfig.Rules[0], {
            Destination: {
                Bucket: `arn:aws:s3:::${destinationBucket}`,

@@ -202,7 +198,7 @@ describe('aws-node-sdk test putBucketReplication configuration rules', () => {
        checkError(config, null, done);
    });

-    it('should not allow a comma separated list of roles when a rule ' +
+    test('should not allow a comma separated list of roles when a rule ' +
        'storageClass defines an external location', done => {
        const config = {
            Role: 'arn:aws:iam::account-id:role/src-resource,' +

@@ -225,7 +221,7 @@ describe('aws-node-sdk test putBucketReplication configuration rules', () => {
        const Role = `${ARN},${ARN}`;
        const config = Object.assign({}, replicationConfig, { Role });

-        it('should accept configuration when \'Role\' is a comma-separated ' +
+        test('should accept configuration when \'Role\' is a comma-separated ' +
            `list of two valid Amazon Resource Names: '${Role}'`, done =>
            checkError(config, null, done));
    });

@@ -233,17 +229,17 @@ describe('aws-node-sdk test putBucketReplication configuration rules', () => {
    replicationUtils.invalidBucketARNs.forEach(ARN => {
        const config = setConfigRules({ Destination: { Bucket: ARN } });

-        it('should not accept configuration when \'Bucket\' is not a ' +
+        test('should not accept configuration when \'Bucket\' is not a ' +
            `valid Amazon Resource Name format: '${ARN}'`, done =>
            checkError(config, 'InvalidArgument', done));
    });

-    it('should not accept configuration when \'Rules\' is empty ', done => {
+    test('should not accept configuration when \'Rules\' is empty ', done => {
        const config = Object.assign({}, replicationConfig, { Rules: [] });
        return checkError(config, 'MalformedXML', done);
    });

-    it('should not accept configuration when \'Rules\' is > 1000', done => {
+    test('should not accept configuration when \'Rules\' is > 1000', done => {
        const arr = [];
        for (let i = 0; i < 1001; i++) {
            arr.push({

@@ -256,13 +252,13 @@ describe('aws-node-sdk test putBucketReplication configuration rules', () => {
        return checkError(config, 'InvalidRequest', done);
    });

-    it('should not accept configuration when \'ID\' length is > 255', done => {
+    test('should not accept configuration when \'ID\' length is > 255', done => {
        // Set ID to a string of length 256.
        const config = setConfigRules({ ID: new Array(257).join('x') });
        return checkError(config, 'InvalidArgument', done);
    });

-    it('should not accept configuration when \'ID\' is not unique', done => {
+    test('should not accept configuration when \'ID\' is not unique', done => {
        const rule1 = replicationConfig.Rules[0];
        // Prefix is unique, but not the ID.
        const rule2 = Object.assign({}, rule1, { Prefix: 'bar' });

@@ -270,7 +266,7 @@ describe('aws-node-sdk test putBucketReplication configuration rules', () => {
        return checkError(config, 'InvalidRequest', done);
    });

-    it('should accept configuration when \'ID\' is not provided for multiple ' +
+    test('should accept configuration when \'ID\' is not provided for multiple ' +
        'rules', done => {
        const replicationConfigWithoutID = Object.assign({}, replicationConfig);
        const rule1 = replicationConfigWithoutID.Rules[0];

@@ -283,32 +279,33 @@ describe('aws-node-sdk test putBucketReplication configuration rules', () => {
    replicationUtils.validStatuses.forEach(status => {
        const config = setConfigRules({ Status: status });

-        it(`should accept configuration when 'Role' is ${status}`, done =>
+        test(`should accept configuration when 'Role' is ${status}`, done =>
            checkError(config, null, done));
    });

-    it('should not accept configuration when \'Status\' is invalid', done => {
+    test('should not accept configuration when \'Status\' is invalid', done => {
        // Status must either be 'Enabled' or 'Disabled'.
        const config = setConfigRules({ Status: 'Invalid' });
        return checkError(config, 'MalformedXML', done);
    });

-    it('should accept configuration when \'Prefix\' is \'\'',
-        done => {
+    test('should accept configuration when \'Prefix\' is \'\'', done => {
        const config = setConfigRules({ Prefix: '' });
        return checkError(config, null, done);
    });

-    it('should not accept configuration when \'Prefix\' length is > 1024',
+    test(
+        'should not accept configuration when \'Prefix\' length is > 1024',
        done => {
        // Set Prefix to a string of length of 1025.
        const config = setConfigRules({
            Prefix: new Array(1026).join('x'),
        });
        return checkError(config, 'InvalidArgument', done);
-    });
+        }
+    );

-    it('should not accept configuration when rules contain overlapping ' +
+    test('should not accept configuration when rules contain overlapping ' +
        '\'Prefix\' values: new prefix starts with used prefix', done => {
        const config = setConfigRules([replicationConfig.Rules[0], {
            Destination: { Bucket: `arn:aws:s3:::${destinationBucket}` },

@@ -318,7 +315,7 @@ describe('aws-node-sdk test putBucketReplication configuration rules', () => {
        return checkError(config, 'InvalidRequest', done);
    });

-    it('should not accept configuration when rules contain overlapping ' +
+    test('should not accept configuration when rules contain overlapping ' +
        '\'Prefix\' values: used prefix starts with new prefix', done => {
        const config = setConfigRules([replicationConfig.Rules[0], {
            Destination: { Bucket: `arn:aws:s3:::${destinationBucket}` },

@@ -328,7 +325,7 @@ describe('aws-node-sdk test putBucketReplication configuration rules', () => {
        return checkError(config, 'InvalidRequest', done);
    });

-    it('should not accept configuration when \'Destination\' properties of ' +
+    test('should not accept configuration when \'Destination\' properties of ' +
        'two or more rules specify different buckets', done => {
        const config = setConfigRules([replicationConfig.Rules[0], {
            Destination: { Bucket: `arn:aws:s3:::${destinationBucket}-1` },

@@ -346,7 +343,7 @@ describe('aws-node-sdk test putBucketReplication configuration rules', () => {
            },
        });

-        it('should accept configuration when \'StorageClass\' is ' +
+        test('should accept configuration when \'StorageClass\' is ' +
            `${storageClass}`, done => checkError(config, null, done));
    });


@@ -359,11 +356,12 @@ describe('aws-node-sdk test putBucketReplication configuration rules', () => {
            },
        });

-        it('should accept configuration when \'StorageClass\' is ' +
+        test('should accept configuration when \'StorageClass\' is ' +
            `${storageClass}`, done => checkError(config, null, done));
    });

-    it('should not accept configuration when \'StorageClass\' is invalid',
+    test(
+        'should not accept configuration when \'StorageClass\' is invalid',
        done => {
        const config = setConfigRules({
            Destination: {

@@ -372,5 +370,6 @@ describe('aws-node-sdk test putBucketReplication configuration rules', () => {
            },
        });
        return checkError(config, 'MalformedXML', done);
-    });
+        }
+    );
});

@@ -42,9 +42,9 @@ describe('PUT bucket cors', () => {
    function _testPutBucketCors(rules, statusCode, errMsg, cb) {
        s3.putBucketCors({ Bucket: bucketName,
            CORSConfiguration: rules }, err => {
-            assert(err, 'Expected err but found none');
-            assert.strictEqual(err.code, errMsg);
-            assert.strictEqual(err.statusCode, statusCode);
+            expect(err).toBeTruthy();
+            expect(err.code).toBe(errMsg);
+            expect(err.statusCode).toBe(statusCode);
            cb();
        });
    }

@@ -53,15 +53,15 @@ describe('PUT bucket cors', () => {

    afterEach(() => bucketUtil.deleteOne(bucketName));

-    it('should put a bucket cors successfully', done => {
+    test('should put a bucket cors successfully', done => {
        s3.putBucketCors({ Bucket: bucketName,
            CORSConfiguration: sampleCors }, err => {
-            assert.strictEqual(err, null, `Found unexpected err ${err}`);
+            expect(err).toBe(null);
            done();
        });
    });

-    it('should return InvalidRequest if more than 100 rules', done => {
+    test('should return InvalidRequest if more than 100 rules', done => {
        const sampleRule = {
            AllowedMethods: ['PUT', 'POST', 'DELETE'],
            AllowedOrigins: ['http://www.example.com'],

@@ -76,50 +76,48 @@ describe('PUT bucket cors', () => {
        _testPutBucketCors(testCors, 400, 'InvalidRequest', done);
    });

-    it('should return MalformedXML if missing AllowedOrigin', done => {
+    test('should return MalformedXML if missing AllowedOrigin', done => {
        const testCors = _corsTemplate({ AllowedOrigins: [] });
        _testPutBucketCors(testCors, 400, 'MalformedXML', done);
    });

-    it('should return InvalidRequest if more than one asterisk in ' +
+    test('should return InvalidRequest if more than one asterisk in ' +
        'AllowedOrigin', done => {
        const testCors =
            _corsTemplate({ AllowedOrigins: ['http://*.*.com'] });
        _testPutBucketCors(testCors, 400, 'InvalidRequest', done);
    });

-    it('should return MalformedXML if missing AllowedMethod', done => {
+    test('should return MalformedXML if missing AllowedMethod', done => {
        const testCors = _corsTemplate({ AllowedMethods: [] });
        _testPutBucketCors(testCors, 400, 'MalformedXML', done);
    });

-    it('should return InvalidRequest if AllowedMethod is not a valid ' +
+    test('should return InvalidRequest if AllowedMethod is not a valid ' +
        'method', done => {
        const testCors = _corsTemplate({ AllowedMethods: ['test'] });
        _testPutBucketCors(testCors, 400, 'InvalidRequest', done);
    });

-    it('should return InvalidRequest for lowercase value for ' +
+    test('should return InvalidRequest for lowercase value for ' +
        'AllowedMethod', done => {
        const testCors = _corsTemplate({ AllowedMethods: ['put', 'get'] });
        _testPutBucketCors(testCors, 400, 'InvalidRequest', done);
    });

-    it('should return InvalidRequest if more than one asterisk in ' +
+    test('should return InvalidRequest if more than one asterisk in ' +
        'AllowedHeader', done => {
        const testCors = _corsTemplate({ AllowedHeaders: ['*-amz-*'] });
        _testPutBucketCors(testCors, 400, 'InvalidRequest', done);
    });

-    it('should return InvalidRequest if ExposeHeader has character ' +
-        'that is not dash or alphanumeric',
-        done => {
+    test('should return InvalidRequest if ExposeHeader has character ' +
+        'that is not dash or alphanumeric', done => {
        const testCors = _corsTemplate({ ExposeHeaders: ['test header'] });
        _testPutBucketCors(testCors, 400, 'InvalidRequest', done);
    });

-    it('should return InvalidRequest if ExposeHeader has wildcard',
-        done => {
+    test('should return InvalidRequest if ExposeHeader has wildcard', done => {
        const testCors = _corsTemplate({ ExposeHeaders: ['x-amz-*'] });
        _testPutBucketCors(testCors, 400, 'InvalidRequest', done);
    });

@@ -14,9 +14,9 @@ describe('PUT bucket website', () => {
    function _testPutBucketWebsite(config, statusCode, errMsg, cb) {
        s3.putBucketWebsite({ Bucket: bucketName,
            WebsiteConfiguration: config }, err => {
-            assert(err, 'Expected err but found none');
-            assert.strictEqual(err.code, errMsg);
-            assert.strictEqual(err.statusCode, statusCode);
+            expect(err).toBeTruthy();
+            expect(err.code).toBe(errMsg);
+            expect(err.statusCode).toBe(statusCode);
            cb();
        });
    }

@@ -44,22 +44,22 @@ describe('PUT bucket website', () => {
        });
    });

-    it('should put a bucket website successfully', done => {
+    test('should put a bucket website successfully', done => {
        const config = new WebsiteConfigTester('index.html');
        s3.putBucketWebsite({ Bucket: bucketName,
            WebsiteConfiguration: config }, err => {
-            assert.strictEqual(err, null, `Found unexpected err ${err}`);
+            expect(err).toBe(null);
            done();
        });
    });

-    it('should return InvalidArgument if IndexDocument or ' +
+    test('should return InvalidArgument if IndexDocument or ' +
        'RedirectAllRequestsTo is not provided', done => {
        const config = new WebsiteConfigTester();
        _testPutBucketWebsite(config, 400, 'InvalidArgument', done);
    });

-    it('should return an InvalidRequest if both ' +
+    test('should return an InvalidRequest if both ' +
        'RedirectAllRequestsTo and IndexDocument are provided', done => {
        const redirectAllTo = {
            HostName: 'test',

@@ -71,12 +71,12 @@ describe('PUT bucket website', () => {
        _testPutBucketWebsite(config, 400, 'InvalidRequest', done);
    });

-    it('should return InvalidArgument if index has slash', done => {
+    test('should return InvalidArgument if index has slash', done => {
        const config = new WebsiteConfigTester('in/dex.html');
        _testPutBucketWebsite(config, 400, 'InvalidArgument', done);
    });

-    it('should return InvalidRequest if both ReplaceKeyWith and ' +
+    test('should return InvalidRequest if both ReplaceKeyWith and ' +
        'ReplaceKeyPrefixWith are present in same rule', done => {
        const config = new WebsiteConfigTester('index.html');
        config.addRoutingRule({ ReplaceKeyPrefixWith: 'test',

@@ -84,7 +84,7 @@ describe('PUT bucket website', () => {
        _testPutBucketWebsite(config, 400, 'InvalidRequest', done);
    });

-    it('should return InvalidRequest if both ReplaceKeyWith and ' +
+    test('should return InvalidRequest if both ReplaceKeyWith and ' +
        'ReplaceKeyPrefixWith are present in same rule', done => {
        const config = new WebsiteConfigTester('index.html');
        config.addRoutingRule({ ReplaceKeyPrefixWith: 'test',

@@ -92,14 +92,14 @@ describe('PUT bucket website', () => {
        _testPutBucketWebsite(config, 400, 'InvalidRequest', done);
    });

-    it('should return InvalidRequest if Redirect Protocol is ' +
+    test('should return InvalidRequest if Redirect Protocol is ' +
        'not http or https', done => {
        const config = new WebsiteConfigTester('index.html');
        config.addRoutingRule({ Protocol: 'notvalidprotocol' });
        _testPutBucketWebsite(config, 400, 'InvalidRequest', done);
    });

-    it('should return InvalidRequest if RedirectAllRequestsTo Protocol ' +
+    test('should return InvalidRequest if RedirectAllRequestsTo Protocol ' +
        'is not http or https', done => {
        const redirectAllTo = {
            HostName: 'test',

@@ -109,21 +109,21 @@ describe('PUT bucket website', () => {
        _testPutBucketWebsite(config, 400, 'InvalidRequest', done);
    });

-    it('should return MalformedXML if Redirect HttpRedirectCode ' +
+    test('should return MalformedXML if Redirect HttpRedirectCode ' +
        'is a string that does not contain a number', done => {
        const config = new WebsiteConfigTester('index.html');
        config.addRoutingRule({ HttpRedirectCode: 'notvalidhttpcode' });
        _testPutBucketWebsite(config, 400, 'MalformedXML', done);
    });

-    it('should return InvalidRequest if Redirect HttpRedirectCode ' +
+    test('should return InvalidRequest if Redirect HttpRedirectCode ' +
        'is not a valid http redirect code (3XX excepting 300)', done => {
        const config = new WebsiteConfigTester('index.html');
        config.addRoutingRule({ HttpRedirectCode: '400' });
        _testPutBucketWebsite(config, 400, 'InvalidRequest', done);
    });

-    it('should return InvalidRequest if Condition ' +
+    test('should return InvalidRequest if Condition ' +
        'HttpErrorCodeReturnedEquals is a string that does ' +
        'not contain a number', done => {
        const condition = { HttpErrorCodeReturnedEquals: 'notvalidcode' };

@@ -132,7 +132,7 @@ describe('PUT bucket website', () => {
        _testPutBucketWebsite(config, 400, 'MalformedXML', done);
    });

-    it('should return InvalidRequest if Condition ' +
+    test('should return InvalidRequest if Condition ' +
        'HttpErrorCodeReturnedEquals is not a valid http ' +
        'error code (4XX or 5XX)', done => {
        const condition = { HttpErrorCodeReturnedEquals: '300' };

@@ -35,7 +35,7 @@ const Bucket = `bucket-skip-scan-${Date.now()}`;

describe('Skip scan cases tests', () => {
    let s3;
-    before(done => {
+    beforeAll(done => {
        const config = getConfig('default', { signatureVersion: 'v4' });
        s3 = new AWS.S3(config);
        s3.createBucket(

@@ -60,7 +60,7 @@ describe('Skip scan cases tests', () => {
            }, done);
        });
    });
-    after(done => {
+    afterAll(done => {
        s3.listObjects({ Bucket }, (err, data) => {
            async.each(data.Contents, (o, next) => {
                s3.deleteObject({ Bucket, Key: o.Key }, next);

@@ -69,9 +69,9 @@ describe('Skip scan cases tests', () => {
            });
        });
    });
-    it('should find all common prefixes in one shot', done => {
+    test('should find all common prefixes in one shot', done => {
        s3.listObjects({ Bucket, Delimiter: '/' }, (err, data) => {
-            assert.strictEqual(err, null);
+            expect(err).toBe(null);
            cutAttributes(data);
            assert.deepStrictEqual(data, {
                IsTruncated: false,
|
|
|
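For context on the listing being asserted above: `listObjects` with `Delimiter: '/'` makes S3 fold every key sharing a prefix up to the delimiter into `CommonPrefixes` instead of returning the keys individually, which is what lets the suite find all prefixes "in one shot". A rough sketch of the shape involved (the key names are hypothetical):

    // With keys 'photos/a.jpg' and 'photos/b.jpg', a delimiter listing
    // returns one grouped prefix instead of the individual keys:
    s3.listObjects({ Bucket, Delimiter: '/' }, (err, data) => {
        // data.CommonPrefixes -> [{ Prefix: 'photos/' }]
        // data.Contents       -> only keys that contain no '/'
    });
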
@@ -22,20 +22,19 @@ function deleteObjects(s3, loopId, cb) {
     }, cb);
 }

-describe('aws-node-sdk stress test bucket', function testSuite() {
+describe('aws-node-sdk stress test bucket', () => {
     this.timeout(120000);
     let s3;
-    before(() => {
+    beforeAll(() => {
         const config = getConfig('default', { signatureVersion: 'v4' });
         s3 = new S3(config);
     });

-    it('createBucket-putObject-deleteObject-deleteBucket loop', done =>
+    test('createBucket-putObject-deleteObject-deleteBucket loop', done =>
         timesSeries(loopCount, (loopId, next) => waterfall([
             next => s3.createBucket({ Bucket: bucket }, err => next(err)),
             next => putObjects(s3, loopId, err => next(err)),
             next => deleteObjects(s3, loopId, err => next(err)),
             next => s3.deleteBucket({ Bucket: bucket }, err => next(err)),
-        ], err => next(err)), done)
-    );
+        ], err => next(err)), done));
 });

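One thing the conversion above leaves behind: `this.timeout(120000)` is Mocha's API, and it only ever worked because the `describe` callback was a regular `function`. With the callback now an arrow function, and under Jest in any case, the call is inert. If the 120-second budget is still wanted, the Jest equivalent would be along these lines (an assumption about intent, not part of this diff):

    // Jest replacement for Mocha's this.timeout(120000); called at the
    // top of the file, it applies to every test in the file.
    jest.setTimeout(120000);
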
@@ -8,29 +8,28 @@ const config = getConfig('default', { signatureVersion: 'v4' });
 const configReplication = getConfig('replication',
     { signatureVersion: 'v4' });
 const s3 = new S3(config);
-describe('aws-node-sdk test bucket versioning', function testSuite() {
+describe('aws-node-sdk test bucket versioning', () => {
     this.timeout(60000);
     let replicationAccountS3;

     // setup test
-    before(done => {
+    beforeAll(done => {
         replicationAccountS3 = new S3(configReplication);
         s3.createBucket({ Bucket: bucket }, done);
     });

     // delete bucket after testing
-    after(done => s3.deleteBucket({ Bucket: bucket }, done));
+    afterAll(done => s3.deleteBucket({ Bucket: bucket }, done));

-    it('should not accept empty versioning configuration', done => {
+    test('should not accept empty versioning configuration', done => {
         const params = {
             Bucket: bucket,
             VersioningConfiguration: {},
         };
         s3.putBucketVersioning(params, error => {
             if (error) {
-                assert.strictEqual(error.statusCode, 400);
-                assert.strictEqual(
-                    error.code, 'IllegalVersioningConfigurationException');
+                expect(error.statusCode).toBe(400);
+                expect(error.code).toBe('IllegalVersioningConfigurationException');
                 done();
             } else {
                 done('accepted empty versioning configuration');
@@ -38,16 +37,16 @@ describe('aws-node-sdk test bucket versioning', function testSuite() {
         });
     });

-    it('should retrieve an empty versioning configuration', done => {
+    test('should retrieve an empty versioning configuration', done => {
         const params = { Bucket: bucket };
         s3.getBucketVersioning(params, (error, data) => {
-            assert.strictEqual(error, null);
+            expect(error).toBe(null);
             assert.deepStrictEqual(data, {});
             done();
         });
     });

-    it('should not accept versioning configuration w/o "Status"', done => {
+    test('should not accept versioning configuration w/o "Status"', done => {
         const params = {
             Bucket: bucket,
             VersioningConfiguration: {
@@ -56,9 +55,8 @@ describe('aws-node-sdk test bucket versioning', function testSuite() {
         };
         s3.putBucketVersioning(params, error => {
             if (error) {
-                assert.strictEqual(error.statusCode, 400);
-                assert.strictEqual(
-                    error.code, 'IllegalVersioningConfigurationException');
+                expect(error.statusCode).toBe(400);
+                expect(error.code).toBe('IllegalVersioningConfigurationException');
                 done();
             } else {
                 done('accepted empty versioning configuration');
@@ -66,16 +64,16 @@ describe('aws-node-sdk test bucket versioning', function testSuite() {
         });
     });

-    it('should retrieve an empty versioning configuration', done => {
+    test('should retrieve an empty versioning configuration', done => {
         const params = { Bucket: bucket };
         s3.getBucketVersioning(params, (error, data) => {
-            assert.strictEqual(error, null);
+            expect(error).toBe(null);
             assert.deepStrictEqual(data, {});
             done();
         });
     });

-    it('should not accept versioning configuration w/ invalid value', done => {
+    test('should not accept versioning configuration w/ invalid value', done => {
         const params = {
             Bucket: bucket,
             VersioningConfiguration: {
@@ -85,9 +83,8 @@ describe('aws-node-sdk test bucket versioning', function testSuite() {
         };
         s3.putBucketVersioning(params, error => {
             if (error) {
-                assert.strictEqual(error.statusCode, 400);
-                assert.strictEqual(
-                    error.code, 'IllegalVersioningConfigurationException');
+                expect(error.statusCode).toBe(400);
+                expect(error.code).toBe('IllegalVersioningConfigurationException');
                 done();
             } else {
                 done('accepted empty versioning configuration');
@@ -95,7 +92,7 @@ describe('aws-node-sdk test bucket versioning', function testSuite() {
         });
     });

-    it('should not accept versioning with MFA Delete enabled', done => {
+    test('should not accept versioning with MFA Delete enabled', done => {
         const params = {
             Bucket: bucket,
             VersioningConfiguration: {
@@ -104,14 +101,14 @@ describe('aws-node-sdk test bucket versioning', function testSuite() {
             },
         };
         s3.putBucketVersioning(params, error => {
-            assert.notEqual(error, null, 'Expected failure but got success');
-            assert.strictEqual(error.statusCode, 501);
-            assert.strictEqual(error.code, 'NotImplemented');
+            expect(error).not.toEqual(null);
+            expect(error.statusCode).toBe(501);
+            expect(error.code).toBe('NotImplemented');
             done();
         });
     });

-    it('should accept versioning with MFA Delete disabled', done => {
+    test('should accept versioning with MFA Delete disabled', done => {
         const params = {
             Bucket: bucket,
             VersioningConfiguration: {
@@ -120,23 +117,23 @@ describe('aws-node-sdk test bucket versioning', function testSuite() {
             },
         };
         s3.putBucketVersioning(params, error => {
-            assert.equal(error, null, 'Expected success but got failure');
+            expect(error).toEqual(null);
             done();
         });
     });

-    it('should retrieve the valid versioning configuration', done => {
+    test('should retrieve the valid versioning configuration', done => {
         const params = { Bucket: bucket };
         // s3.getBucketVersioning(params, done);
         s3.getBucketVersioning(params, (error, data) => {
-            assert.strictEqual(error, null);
+            expect(error).toBe(null);
             assert.deepStrictEqual(data, { MFADelete: 'Disabled',
                 Status: 'Enabled' });
             done();
         });
     });

-    it('should accept valid versioning configuration', done => {
+    test('should accept valid versioning configuration', done => {
         const params = {
             Bucket: bucket,
             VersioningConfiguration: {
@@ -146,7 +143,7 @@ describe('aws-node-sdk test bucket versioning', function testSuite() {
         s3.putBucketVersioning(params, done);
     });

-    it('should accept valid versioning configuration if user is a ' +
+    test('should accept valid versioning configuration if user is a ' +
     'replication user', done => {
         const params = {
             Bucket: bucket,
@@ -157,11 +154,11 @@ describe('aws-node-sdk test bucket versioning', function testSuite() {
         replicationAccountS3.putBucketVersioning(params, done);
     });

-    it('should retrieve the valid versioning configuration', done => {
+    test('should retrieve the valid versioning configuration', done => {
         const params = { Bucket: bucket };
         // s3.getBucketVersioning(params, done);
         s3.getBucketVersioning(params, (error, data) => {
-            assert.strictEqual(error, null);
+            expect(error).toBe(null);
             assert.deepStrictEqual(data, { Status: 'Enabled' });
             done();
         });
@@ -171,21 +168,21 @@ describe('aws-node-sdk test bucket versioning', function testSuite() {

 describe('bucket versioning for ingestion buckets', () => {
     const Bucket = `ingestion-bucket-${Date.now()}`;
-    before(done => s3.createBucket({
+    beforeAll(done => s3.createBucket({
         Bucket,
         CreateBucketConfiguration: {
             LocationConstraint: 'us-east-2:ingest',
         },
     }, done));

-    after(done => s3.deleteBucket({ Bucket }, done));
+    afterAll(done => s3.deleteBucket({ Bucket }, done));

-    it('should not allow suspending versioning for ingestion buckets', done => {
+    test('should not allow suspending versioning for ingestion buckets', done => {
         s3.putBucketVersioning({ Bucket, VersioningConfiguration: {
             Status: 'Suspended'
         } }, err => {
-            assert(err, 'Expected error but got success');
-            assert.strictEqual(err.code, 'InvalidBucketState');
+            expect(err).toBeTruthy();
+            expect(err.code).toBe('InvalidBucketState');
             done();
         });
     });

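A caveat that applies to all of the `assert` → `expect` rewrites in callback-style tests like these: a failing `expect` throws, and when the throw happens inside an SDK callback rather than on Jest's own call stack, the test often fails with a timeout instead of the assertion message. A common defensive pattern (a sketch, not something this diff adds) routes the failure through `done`:

    s3.getBucketVersioning({ Bucket: bucket }, (error, data) => {
        try {
            expect(error).toBe(null);
            done();
        } catch (e) {
            done(e); // report the assertion failure instead of timing out
        }
    });
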
@@ -6,13 +6,13 @@ describe('S3 connect test', () => {
     const config = getConfig();
     const s3 = new S3(config);

-    it('should list buckets', done => {
+    test('should list buckets', done => {
         s3.listBuckets((err, data) => {
             if (err) {
                 done(err);
             }

-            assert.ok(data.Buckets, 'should contain Buckets');
+            expect(data.Buckets).toBeTruthy();
             done();
         });
     });

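Independent of the migration, note that the error branch above calls `done(err)` without `return`, so on failure the test falls through and still evaluates `expect(data.Buckets)` on a possibly undefined `data`. A guarded version would look like this (a sketch, not part of the diff):

    s3.listBuckets((err, data) => {
        if (err) {
            return done(err); // stop here; don't run the assertions below
        }
        expect(data.Buckets).toBeTruthy();
        return done();
    });
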
@@ -16,7 +16,7 @@ const itSkipAWS = process.env.AWS_ON_AIR
 function diff(putFile, receivedFile, done) {
     process.stdout.write(`diff ${putFile} ${receivedFile}\n`);
     cp.spawn('diff', [putFile, receivedFile]).on('exit', code => {
-        assert.strictEqual(code, 0);
+        expect(code).toBe(0);
         done();
     });
 }
@@ -28,11 +28,11 @@ function deleteFile(file, callback) {
     });
 }

-describe('aws-node-sdk v2auth query tests', function testSuite() {
+describe('aws-node-sdk v2auth query tests', () => {
     this.timeout(60000);
     let s3;

-    before(() => {
+    beforeAll(() => {
         const config = getConfig('default');

         s3 = new S3(config);
@@ -46,45 +46,45 @@ describe('aws-node-sdk v2auth query tests', function testSuite() {
         const params = { Bucket: bucket, Expires: 604810 };
         const url = s3.getSignedUrl('createBucket', params);
         provideRawOutput(['-verbose', '-X', 'PUT', url], httpCode => {
-            assert.strictEqual(httpCode, '403 FORBIDDEN');
+            expect(httpCode).toBe('403 FORBIDDEN');
             done();
         });
     });

-    it('should return an error code if request occurs after expiry',
-        done => {
+    test('should return an error code if request occurs after expiry', done => {
         const params = { Bucket: bucket, Expires: 1 };
         const url = s3.getSignedUrl('createBucket', params);
         setTimeout(() => {
             provideRawOutput(['-verbose', '-X', 'PUT', url], httpCode => {
-                assert.strictEqual(httpCode, '403 FORBIDDEN');
+                expect(httpCode).toBe('403 FORBIDDEN');
                 done();
             });
         }, 1500);
     });

-    it('should create a bucket', done => {
+    test('should create a bucket', done => {
         const params = { Bucket: bucket, Expires: almostOutsideTime };
         const url = s3.getSignedUrl('createBucket', params);
         provideRawOutput(['-verbose', '-X', 'PUT', url], httpCode => {
-            assert.strictEqual(httpCode, '200 OK');
+            expect(httpCode).toBe('200 OK');
             done();
         });
     });


-    it('should put an object', done => {
+    test('should put an object', done => {
         const params = { Bucket: bucket, Key: 'key', Expires:
             almostOutsideTime };
         const url = s3.getSignedUrl('putObject', params);
         provideRawOutput(['-verbose', '-X', 'PUT', url,
             '--upload-file', 'uploadFile'], httpCode => {
-            assert.strictEqual(httpCode, '200 OK');
+            expect(httpCode).toBe('200 OK');
             done();
         });
     });

-    it('should put an object with an acl setting and a storage class setting',
+    test(
+        'should put an object with an acl setting and a storage class setting',
         done => {
         // This will test that upper case query parameters and lowercase
         // query parameters (i.e., 'x-amz-acl') are being sorted properly.
@@ -96,46 +96,47 @@ describe('aws-node-sdk v2auth query tests', function testSuite() {
         const url = s3.getSignedUrl('putObject', params);
         provideRawOutput(['-verbose', '-X', 'PUT', url,
             '--upload-file', 'uploadFile'], httpCode => {
-            assert.strictEqual(httpCode, '200 OK');
+            expect(httpCode).toBe('200 OK');
             done();
         });
-    });
+    }
+    );


-    it('should get an object', done => {
+    test('should get an object', done => {
         const params = { Bucket: bucket, Key: 'key', Expires:
             almostOutsideTime };
         const url = s3.getSignedUrl('getObject', params);
         provideRawOutput(['-verbose', '-o', 'download', url], httpCode => {
-            assert.strictEqual(httpCode, '200 OK');
+            expect(httpCode).toBe('200 OK');
             done();
         });
     });

-    it('downloaded file should equal file that was put', done => {
+    test('downloaded file should equal file that was put', done => {
         diff('uploadFile', 'download', () => {
             deleteFile('download', done);
         });
     });

-    it('should delete an object', done => {
+    test('should delete an object', done => {
         const params = { Bucket: bucket, Key: 'key', Expires:
             almostOutsideTime };
         const url = s3.getSignedUrl('deleteObject', params);
         provideRawOutput(['-verbose', '-X', 'DELETE', url],
             httpCode => {
-                assert.strictEqual(httpCode, '204 NO CONTENT');
+                expect(httpCode).toBe('204 NO CONTENT');
                 done();
             });
     });


-    it('should delete a bucket', done => {
+    test('should delete a bucket', done => {
         const params = { Bucket: bucket, Expires: almostOutsideTime };
         const url = s3.getSignedUrl('deleteBucket', params);
         provideRawOutput(['-verbose', '-X', 'DELETE', url],
             httpCode => {
-                assert.strictEqual(httpCode, '204 NO CONTENT');
+                expect(httpCode).toBe('204 NO CONTENT');
                 done();
             });
     });

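For readers unfamiliar with the pattern in these query tests: `s3.getSignedUrl(operation, params)` in the AWS SDK for JavaScript (v2) synchronously returns a presigned URL whose query string carries the signature and expiry, and the suite then replays that URL through curl via the `provideRawOutput` helper. A minimal sketch (bucket and key names are hypothetical):

    const AWS = require('aws-sdk');

    const s3 = new AWS.S3({ signatureVersion: 'v2' });
    // Presign a PUT that stays valid for 60 seconds; whoever holds the
    // URL can perform exactly this one operation until it expires.
    const url = s3.getSignedUrl('putObject', {
        Bucket: 'my-bucket',
        Key: 'key',
        Expires: 60,
    });
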
@@ -14,7 +14,7 @@ const bucket = `mybucket-${random}`;
 function diff(putFile, receivedFile, done) {
     process.stdout.write(`diff ${putFile} ${receivedFile}\n`);
     cp.spawn('diff', [putFile, receivedFile]).on('exit', code => {
-        assert.strictEqual(code, 0);
+        expect(code).toBe(0);
         done();
     });
 }
@@ -26,41 +26,41 @@ function deleteFile(file, callback) {
     });
 }

-describe('aws-node-sdk v4auth query tests', function testSuite() {
+describe('aws-node-sdk v4auth query tests', () => {
     this.timeout(60000);
     let s3;

     // setup test
-    before(() => {
+    beforeAll(() => {
         const config = getConfig('default', { signatureVersion: 'v4' });

         s3 = new S3(config);
     });

     // emptyListing test
-    it('should do an empty bucket listing', done => {
+    test('should do an empty bucket listing', done => {
         const url = s3.getSignedUrl('listBuckets');
         provideRawOutput(['-verbose', url], httpCode => {
-            assert.strictEqual(httpCode, '200 OK');
+            expect(httpCode).toBe('200 OK');
             done();
         });
     });

     // createBucket test
-    it('should create a bucket', done => {
+    test('should create a bucket', done => {
         const params = { Bucket: bucket };
         const url = s3.getSignedUrl('createBucket', params);
         provideRawOutput(['-verbose', '-X', 'PUT', url], httpCode => {
-            assert.strictEqual(httpCode, '200 OK');
+            expect(httpCode).toBe('200 OK');
             done();
         });
     });

     // fullListing test
-    it('should do a bucket listing with result', done => {
+    test('should do a bucket listing with result', done => {
         const url = s3.getSignedUrl('listBuckets');
         provideRawOutput(['-verbose', url], (httpCode, rawOutput) => {
-            assert.strictEqual(httpCode, '200 OK');
+            expect(httpCode).toBe('200 OK');
             parseString(rawOutput.stdout, (err, xml) => {
                 if (err) {
                     assert.ifError(err);
@@ -68,24 +68,25 @@ describe('aws-node-sdk v4auth query tests', function testSuite() {
                 const bucketNames = xml.ListAllMyBucketsResult
                     .Buckets[0].Bucket.map(item => item.Name[0]);
                 const whereIsMyBucket = bucketNames.indexOf(bucket);
-                assert(whereIsMyBucket > -1);
+                expect(whereIsMyBucket > -1).toBeTruthy();
                 done();
             });
         });
     });

     // putObject test
-    it('should put an object', done => {
+    test('should put an object', done => {
         const params = { Bucket: bucket, Key: 'key' };
         const url = s3.getSignedUrl('putObject', params);
         provideRawOutput(['-verbose', '-X', 'PUT', url,
             '--upload-file', 'uploadFile'], httpCode => {
-            assert.strictEqual(httpCode, '200 OK');
+            expect(httpCode).toBe('200 OK');
             done();
         });
     });

-    it('should put an object with an acl setting and a storage class setting',
+    test(
+        'should put an object with an acl setting and a storage class setting',
         done => {
         // This will test that upper case query parameters and lowercase
         // query parameters (i.e., 'x-amz-acl') are being sorted properly.
@@ -98,105 +99,106 @@ describe('aws-node-sdk v4auth query tests', function testSuite() {
         const url = s3.getSignedUrl('putObject', params);
         provideRawOutput(['-verbose', '-X', 'PUT', url,
             '--upload-file', 'uploadFile'], httpCode => {
-            assert.strictEqual(httpCode, '200 OK');
+            expect(httpCode).toBe('200 OK');
             done();
         });
-    });
+    }
+    );

-    it('should put an object with native characters', done => {
+    test('should put an object with native characters', done => {
         const Key = 'key-pâtisserie-中文-español-English-हिन्दी-العربية-' +
             'português-বাংলা-русский-日本語-ਪੰਜਾਬੀ-한국어-தமிழ்';
         const params = { Bucket: bucket, Key };
         const url = s3.getSignedUrl('putObject', params);
         provideRawOutput(['-verbose', '-X', 'PUT', url,
             '--upload-file', 'uploadFile'], httpCode => {
-            assert.strictEqual(httpCode, '200 OK');
+            expect(httpCode).toBe('200 OK');
             done();
         });
     });

     // listObjects test
-    it('should list objects in bucket', done => {
+    test('should list objects in bucket', done => {
         const params = { Bucket: bucket };
         const url = s3.getSignedUrl('listObjects', params);
         provideRawOutput(['-verbose', url], (httpCode, rawOutput) => {
-            assert.strictEqual(httpCode, '200 OK');
+            expect(httpCode).toBe('200 OK');
             parseString(rawOutput.stdout, (err, result) => {
                 if (err) {
                     assert.ifError(err);
                 }
-                assert.strictEqual(result.ListBucketResult
-                    .Contents[0].Key[0], 'key');
+                expect(result.ListBucketResult
                    .Contents[0].Key[0]).toBe('key');
                 done();
             });
         });
     });

     // getObject test
-    it('should get an object', done => {
+    test('should get an object', done => {
         const params = { Bucket: bucket, Key: 'key' };
         const url = s3.getSignedUrl('getObject', params);
         provideRawOutput(['-verbose', '-o', 'download', url], httpCode => {
-            assert.strictEqual(httpCode, '200 OK');
+            expect(httpCode).toBe('200 OK');
             done();
         });
     });

-    it('downloaded file should equal file that was put', done => {
+    test('downloaded file should equal file that was put', done => {
         diff('uploadFile', 'download', () => {
             deleteFile('download', done);
         });
     });

     // deleteObject test
-    it('should delete an object', done => {
+    test('should delete an object', done => {
         const params = { Bucket: bucket, Key: 'key' };
         const url = s3.getSignedUrl('deleteObject', params);
         provideRawOutput(['-verbose', '-X', 'DELETE', url],
             httpCode => {
-                assert.strictEqual(httpCode, '204 NO CONTENT');
+                expect(httpCode).toBe('204 NO CONTENT');
                 done();
             });
     });

-    it('should return a 204 on delete of an already deleted object', done => {
+    test('should return a 204 on delete of an already deleted object', done => {
         const params = { Bucket: bucket, Key: 'key' };
         const url = s3.getSignedUrl('deleteObject', params);
         provideRawOutput(['-verbose', '-X', 'DELETE', url],
             httpCode => {
-                assert.strictEqual(httpCode, '204 NO CONTENT');
+                expect(httpCode).toBe('204 NO CONTENT');
                 done();
             });
     });

-    it('should return 204 on delete of non-existing object', done => {
+    test('should return 204 on delete of non-existing object', done => {
         const params = { Bucket: bucket, Key: 'randomObject' };
         const url = s3.getSignedUrl('deleteObject', params);
         provideRawOutput(['-verbose', '-X', 'DELETE', url],
             httpCode => {
-                assert.strictEqual(httpCode, '204 NO CONTENT');
+                expect(httpCode).toBe('204 NO CONTENT');
                 done();
             });
     });

-    it('should delete an object with native characters', done => {
+    test('should delete an object with native characters', done => {
         const Key = 'key-pâtisserie-中文-español-English-हिन्दी-العربية-' +
             'português-বাংলা-русский-日本語-ਪੰਜਾਬੀ-한국어-தமிழ்';
         const params = { Bucket: bucket, Key };
         const url = s3.getSignedUrl('deleteObject', params);
         provideRawOutput(['-verbose', '-X', 'DELETE', url], httpCode => {
-            assert.strictEqual(httpCode, '204 NO CONTENT');
+            expect(httpCode).toBe('204 NO CONTENT');
             done();
         });
     });

     // deleteBucket test
-    it('should delete a bucket', done => {
+    test('should delete a bucket', done => {
         const params = { Bucket: bucket };
         const url = s3.getSignedUrl('deleteBucket', params);
         provideRawOutput(['-verbose', '-X', 'DELETE', url],
             httpCode => {
-                assert.strictEqual(httpCode, '204 NO CONTENT');
+                expect(httpCode).toBe('204 NO CONTENT');
                 done();
             });
     });

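The three DELETE cases above lean on S3's deletes being idempotent: on an unversioned bucket, deleting an already-deleted key, or a key that never existed, still answers 204 No Content rather than an error. One sketch covers all three assertions:

    // Deleting twice, or deleting a key that never existed, both
    // succeed with 204; there is no NoSuchKey error for DELETE.
    s3.deleteObject({ Bucket: bucket, Key: 'randomObject' }, err => {
        expect(err).toBe(null);
        done();
    });
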
@@ -26,36 +26,36 @@ const objectKey = 'toAbort&<>"\'';
 // same uploadId
 const multipartUploadData = {};

-describe('aws-node-sdk test suite as registered user', function testSuite() {
+describe('aws-node-sdk test suite as registered user', () => {
     this.timeout(60000);
     let s3;

     // setup test
-    before(() => {
+    beforeAll(() => {
         const config = getConfig('default', { signatureVersion: 'v4' });

         s3 = new S3(config);
     });

     // bucketListing test
-    it('should do bucket listing', done => {
+    test('should do bucket listing', done => {
         s3.listBuckets((err, data) => {
             if (err) {
                 return done(new Error(`error listing buckets: ${err}`));
             }

-            assert(data.Buckets, 'No buckets Info sent back');
-            assert(data.Owner, 'No owner Info sent back');
-            assert(data.Owner.ID, 'Owner ID not sent back');
-            assert(data.Owner.DisplayName, 'DisplayName not sent back');
+            expect(data.Buckets).toBeTruthy();
+            expect(data.Owner).toBeTruthy();
+            expect(data.Owner.ID).toBeTruthy();
+            expect(data.Owner.DisplayName).toBeTruthy();
             const owner = Object.keys(data.Owner);
-            assert.strictEqual(owner.length, 2, 'Too much fields in owner');
+            expect(owner.length).toBe(2);
             return done();
         });
     });

     // createbucket test
-    it('should create a bucket', done => {
+    test('should create a bucket', done => {
         s3.createBucket({ Bucket: bucket }, err => {
             if (err) {
                 return done(new Error(`error creating bucket: ${err}`));
@@ -65,22 +65,23 @@ describe('aws-node-sdk test suite as registered user', function testSuite() {
     });

     // createMPU test
-    it('should create a multipart upload', done => {
+    test('should create a multipart upload', done => {
         s3.createMultipartUpload({ Bucket: bucket, Key: objectKey },
             (err, data) => {
                 if (err) {
                     return done(new Error(
                         `error initiating multipart upload: ${err}`));
                 }
-                assert.strictEqual(data.Bucket, bucket);
-                assert.strictEqual(data.Key, objectKey);
-                assert.ok(data.UploadId);
+                expect(data.Bucket).toBe(bucket);
+                expect(data.Key).toBe(objectKey);
+                expect(data.UploadId).toBeTruthy();
                 multipartUploadData.firstUploadId = data.UploadId;
                 return done();
             });
     });

-    it('should upload a part of a multipart upload to be aborted',
+    test(
+        'should upload a part of a multipart upload to be aborted',
         // uploadpart test
         done => {
             const params = {
@@ -94,13 +95,14 @@ describe('aws-node-sdk test suite as registered user', function testSuite() {
                 if (err) {
                     return done(new Error(`error uploading a part: ${err}`));
                 }
-                assert.strictEqual(data.ETag, `"${calculatedFirstPartHash}"`);
+                expect(data.ETag).toBe(`"${calculatedFirstPartHash}"`);
                 return done();
             });
-    });
+        }
+    );

     // abortMPU test
-    it('should abort a multipart upload', done => {
+    test('should abort a multipart upload', done => {
         const params = {
             Bucket: bucket,
             Key: objectKey,
@@ -111,13 +113,13 @@ describe('aws-node-sdk test suite as registered user', function testSuite() {
                 return done(new Error(
                     `error aborting multipart upload: ${err}`));
             }
-            assert.ok(data);
+            expect(data).toBeTruthy();
             return done();
         });
     });

     // createMPU test
-    it('should upload a part of a multipart upload', done => {
+    test('should upload a part of a multipart upload', done => {
         s3.createMultipartUpload({ Bucket: bucket, Key: 'toComplete' },
             (err, data) => {
                 if (err) {
@@ -138,16 +140,14 @@ describe('aws-node-sdk test suite as registered user', function testSuite() {
                     return done(
                         new Error(`error uploading a part: ${err}`));
                 }
-                assert.strictEqual(data.ETag,
-                    `"${calculatedFirstPartHash}"`);
+                expect(data.ETag).toBe(`"${calculatedFirstPartHash}"`);
                 return done();
             });
             return undefined;
         });
     });

-    it('should upload a second part of a multipart upload',
-    // createMPU test
+    test('should upload a second part of a multipart upload', // createMPU test
         done => {
             const params = {
                 Bucket: bucket,
@@ -160,13 +160,13 @@ describe('aws-node-sdk test suite as registered user', function testSuite() {
                 if (err) {
                     return done(new Error(`error uploading a part: ${err}`));
                 }
-                assert.strictEqual(data.ETag, `"${calculatedSecondPartHash}"`);
+                expect(data.ETag).toBe(`"${calculatedSecondPartHash}"`);
                 return done();
             });
     });

     // listparts test
-    it('should list the parts of a multipart upload', done => {
+    test('should list the parts of a multipart upload', done => {
         const params = {
             Bucket: bucket,
             Key: 'toComplete',
@@ -176,19 +176,17 @@ describe('aws-node-sdk test suite as registered user', function testSuite() {
             if (err) {
                 return done(new Error(`error listing parts: ${err}`));
             }
-            assert.strictEqual(data.Bucket, bucket);
-            assert.strictEqual(data.Key, 'toComplete');
-            assert.strictEqual(data.UploadId, multipartUploadData
+            expect(data.Bucket).toBe(bucket);
+            expect(data.Key).toBe('toComplete');
+            expect(data.UploadId).toBe(multipartUploadData
                 .secondUploadId);
-            assert.strictEqual(data.IsTruncated, false);
-            assert.strictEqual(data.Parts[0].PartNumber, 1);
-            assert.strictEqual(data.Parts[0].ETag,
-                `"${calculatedFirstPartHash}"`);
-            assert.strictEqual(data.Parts[0].Size, 5242880);
-            assert.strictEqual(data.Parts[1].PartNumber, 2);
-            assert.strictEqual(data.Parts[1].ETag,
-                `"${calculatedSecondPartHash}"`);
-            assert.strictEqual(data.Parts[1].Size, 5242880);
+            expect(data.IsTruncated).toBe(false);
+            expect(data.Parts[0].PartNumber).toBe(1);
+            expect(data.Parts[0].ETag).toBe(`"${calculatedFirstPartHash}"`);
+            expect(data.Parts[0].Size).toBe(5242880);
+            expect(data.Parts[1].PartNumber).toBe(2);
+            expect(data.Parts[1].ETag).toBe(`"${calculatedSecondPartHash}"`);
+            expect(data.Parts[1].Size).toBe(5242880);
             // Must disable for now when running with Vault
             // since will need to pull actual ARN and canonicalId
             // assert.strictEqual(data.Initiator.ID, accessKey1ARN);
@@ -197,13 +195,13 @@ describe('aws-node-sdk test suite as registered user', function testSuite() {
             // call works. For real implementation with vault,
             // will need the canonicalId.
             // assert.strictEqual(data.Owner.ID, config.accessKeyId);
-            assert.strictEqual(data.StorageClass, 'STANDARD');
+            expect(data.StorageClass).toBe('STANDARD');
             return {};
         });
         return done();
     });

-    it('should return an error if do not provide correct ' +
+    test('should return an error if do not provide correct ' +
     // completempu test
     'xml when completing a multipart upload', done => {
         const params = {
@@ -212,13 +210,13 @@ describe('aws-node-sdk test suite as registered user', function testSuite() {
             UploadId: multipartUploadData.secondUploadId,
         };
         s3.completeMultipartUpload(params, err => {
-            assert.strictEqual(err.code, 'MalformedXML');
+            expect(err.code).toBe('MalformedXML');
             return done();
         });
     });

     // completempu test
-    it('should complete a multipart upload', done => {
+    test('should complete a multipart upload', done => {
         const params = {
             Bucket: bucket,
             Key: 'toComplete',
@@ -240,14 +238,14 @@ describe('aws-node-sdk test suite as registered user', function testSuite() {
             if (err) {
                 return done(new Error(`error completing mpu: ${err}`));
             }
-            assert.strictEqual(data.Bucket, bucket);
-            assert.strictEqual(data.Key, 'toComplete');
-            assert.strictEqual(data.ETag, combinedETag);
+            expect(data.Bucket).toBe(bucket);
+            expect(data.Key).toBe('toComplete');
+            expect(data.ETag).toBe(combinedETag);
             return done();
         });
     });

-    it('should get an object put by multipart upload', done => {
+    test('should get an object put by multipart upload', done => {
         const params = {
             Bucket: bucket,
             Key: 'toComplete',
@@ -257,8 +255,7 @@ describe('aws-node-sdk test suite as registered user', function testSuite() {
                 return done(new Error(
                     `error getting object put by mpu: ${err}`));
             }
-            assert.strictEqual(data.ETag,
-                combinedETag);
+            expect(data.ETag).toBe(combinedETag);
             const uploadedObj = Buffer.concat([firstBufferBody,
                 secondBufferBody]);
             assert.deepStrictEqual(data.Body, uploadedObj);
@@ -316,7 +313,7 @@ describe('aws-node-sdk test suite as registered user', function testSuite() {
     ];

     mpuRangeGetTests.forEach(test => {
-        it(test.it, done => {
+        test(test.it, done => {
             const params = {
                 Bucket: bucket,
                 Key: 'toComplete',
@@ -327,19 +324,17 @@ describe('aws-node-sdk test suite as registered user', function testSuite() {
                 return done(new Error(
                     `error getting object range put by mpu: ${err}`));
                }
-                assert.strictEqual(data.ContentLength, test.contentLength);
-                assert.strictEqual(data.AcceptRanges, 'bytes');
-                assert.strictEqual(data.ContentRange, test.contentRange);
-                assert.strictEqual(data.ETag,
-                    combinedETag);
+                expect(data.ContentLength).toBe(test.contentLength);
+                expect(data.AcceptRanges).toBe('bytes');
+                expect(data.ContentRange).toBe(test.contentRange);
+                expect(data.ETag).toBe(combinedETag);
                 assert.deepStrictEqual(data.Body, test.expectedBuff);
                 return done();
             });
         });
     });

-    it('should delete object created by multipart upload',
-    // deleteObject test
+    test('should delete object created by multipart upload', // deleteObject test
         done => {
             const params = {
                 Bucket: bucket,
@@ -349,12 +344,12 @@ describe('aws-node-sdk test suite as registered user', function testSuite() {
             if (err) {
                 return done(new Error(`error deleting object: ${err}`));
             }
-            assert.ok(data);
+            expect(data).toBeTruthy();
             return done();
         });
     });

-    it('should put an object regularly (non-MPU)', done => {
+    test('should put an object regularly (non-MPU)', done => {
         const params = {
             Bucket: bucket,
             Key: 'normalput',
@@ -365,22 +360,21 @@ describe('aws-node-sdk test suite as registered user', function testSuite() {
                 return done(new Error(
                     `error putting object regularly: ${err}`));
             }
-            assert.ok(data);
+            expect(data).toBeTruthy();
             return done();
         });
     });

-    it('should return InvalidRange if the range of the resource does ' +
-    'not cover the byte range',
-    done => {
+    test('should return InvalidRange if the range of the resource does ' +
+    'not cover the byte range', done => {
         const params = {
             Bucket: bucket,
             Key: 'normalput',
             Range: 'bytes=200-200',
         };
         s3.getObject(params, err => {
-            assert.notEqual(err, null, 'Expected failure but got success');
-            assert.strictEqual(err.code, 'InvalidRange');
+            expect(err).not.toEqual(null);
+            expect(err.code).toBe('InvalidRange');
             return done();
         });
     });
@@ -420,9 +414,8 @@ describe('aws-node-sdk test suite as registered user', function testSuite() {
     });
     testsRangeOnEmptyFile.forEach(test => {
         const validText = test.valid ? 'InvalidRange error' : 'empty file';
-        it(`should return ${validText} if get range ${test.range} on ` +
-        'empty object',
-        done => {
+        test(`should return ${validText} if get range ${test.range} on ` +
+        'empty object', done => {
             const params = {
                 Bucket: bucketEmptyObj,
                 Key: 'emptyobj',
@@ -430,13 +423,11 @@ describe('aws-node-sdk test suite as registered user', function testSuite() {
             };
             s3.getObject(params, (err, data) => {
                 if (test.valid) {
-                    assert.notEqual(err, null, 'Expected failure but ' +
-                        'got success');
-                    assert.strictEqual(err.code, 'InvalidRange');
+                    expect(err).not.toEqual(null);
+                    expect(err.code).toBe('InvalidRange');
                 } else {
-                    assert.equal(err, null, 'Expected success but ' +
-                        `got failure: ${err}`);
-                    assert.strictEqual(data.Body.toString(), '');
+                    expect(err).toEqual(null);
+                    expect(data.Body.toString()).toBe('');
                 }
                 return done();
             });
@@ -477,7 +468,7 @@ describe('aws-node-sdk test suite as registered user', function testSuite() {
     ];

     regularObjectRangeGetTests.forEach(test => {
-        it(test.it, done => {
+        test(test.it, done => {
             const params = {
                 Bucket: bucket,
                 Key: 'normalput',
@@ -488,17 +479,16 @@ describe('aws-node-sdk test suite as registered user', function testSuite() {
                 return done(new Error(
                     `error getting object range: ${err}`));
             }
-            assert.strictEqual(data.AcceptRanges, 'bytes');
-            assert.strictEqual(data.ContentLength, test.contentLength);
-            assert.strictEqual(data.ContentRange, test.contentRange);
+            expect(data.AcceptRanges).toBe('bytes');
+            expect(data.ContentLength).toBe(test.contentLength);
+            expect(data.ContentRange).toBe(test.contentRange);
             assert.deepStrictEqual(data.Body, test.expectedBuff);
             return done();
         });
     });
     });

-    it('should delete an object put without MPU',
-    // deleteObject test
+    test('should delete an object put without MPU', // deleteObject test
         done => {
             const params = {
                 Bucket: bucket,
@@ -508,13 +498,13 @@ describe('aws-node-sdk test suite as registered user', function testSuite() {
             if (err) {
                 return done(new Error(`error deleting object: ${err}`));
             }
-            assert.ok(data);
+            expect(data).toBeTruthy();
             return done();
         });
     });

     // deletebucket test
-    it('should delete a bucket', done => {
+    test('should delete a bucket', done => {
         s3.deleteBucket({ Bucket: bucket }, err => {
             if (err) {
                 return done(new Error(`error deleting bucket: ${err}`));

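Two notes on the suite above. First, inside `mpuRangeGetTests.forEach(test => { ... })` (and the `regularObjectRangeGetTests` loop), the callback parameter `test` now shadows Jest's global `test`, so `test(test.it, done => { ... })` calls the array element instead of registering a test case; renaming the parameter would avoid that, e.g.:

    // The parameter name must not shadow Jest's global `test`:
    mpuRangeGetTests.forEach(testCase => {
        test(testCase.it, done => { /* ... */ });
    });

Second, on the `combinedETag` being asserted: the ETag of a completed multipart upload is not the MD5 of the whole object; it is the MD5 of the concatenated binary MD5 digests of the parts, suffixed with `-<partCount>`. Presumably the suite derives it that way from the two part buffers; a sketch of the calculation:

    const crypto = require('crypto');

    // ETag of a completed MPU: md5(md5(part1) + md5(part2) + ...) + '-<n>'
    function multipartETag(partBuffers) {
        const digests = partBuffers.map(part =>
            crypto.createHash('md5').update(part).digest());
        const combined = crypto.createHash('md5')
            .update(Buffer.concat(digests)).digest('hex');
        return `"${combined}-${partBuffers.length}"`;
    }
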
@@ -10,7 +10,7 @@ const updatedUserMetadata = { food: 'cake' };

 runIfMongo('Basic search', () => {
     const bucketName = `basicsearchmebucket${Date.now()}`;
-    before(done => {
+    beforeAll(done => {
         s3Client.createBucket({ Bucket: bucketName }, err => {
             if (err) {
                 return done(err);
@@ -26,7 +26,7 @@ runIfMongo('Basic search', () => {
         });
     });

-    after(done => {
+    afterAll(done => {
         s3Client.deleteObjects({ Bucket: bucketName, Delete: { Objects: [
             { Key: objectKey },
             { Key: hiddenKey }],
@@ -39,46 +39,48 @@ runIfMongo('Basic search', () => {
         });
     });

-    it('should list object with searched for system metadata', done => {
+    test('should list object with searched for system metadata', done => {
         const encodedSearch = encodeURIComponent(`key="${objectKey}"`);
         return runAndCheckSearch(s3Client, bucketName,
             encodedSearch, objectKey, done);
     });

-    it('should list object with regex searched for system metadata', done => {
+    test('should list object with regex searched for system metadata', done => {
         const encodedSearch = encodeURIComponent('key LIKE "find.*"');
         return runAndCheckSearch(s3Client, bucketName,
             encodedSearch, objectKey, done);
     });

-    it('should list object with regex searched for system metadata with flags',
+    test(
+        'should list object with regex searched for system metadata with flags',
         done => {
             const encodedSearch = encodeURIComponent('key LIKE "/FIND.*/i"');
             return runAndCheckSearch(s3Client, bucketName,
                 encodedSearch, objectKey, done);
-    });
+        }
+    );

-    it('should return empty when no object match regex', done => {
+    test('should return empty when no object match regex', done => {
         const encodedSearch = encodeURIComponent('key LIKE "/NOTFOUND.*/i"');
         return runAndCheckSearch(s3Client, bucketName,
             encodedSearch, null, done);
     });

-    it('should list object with searched for user metadata', done => {
+    test('should list object with searched for user metadata', done => {
         const encodedSearch =
             encodeURIComponent(`x-amz-meta-food="${userMetadata.food}"`);
         return runAndCheckSearch(s3Client, bucketName, encodedSearch,
             objectKey, done);
     });

-    it('should list object with searched for tag metadata', done => {
+    test('should list object with searched for tag metadata', done => {
         const encodedSearch =
             encodeURIComponent('tags.item-type="main"');
         return runAndCheckSearch(s3Client, bucketName, encodedSearch,
             objectKey, done);
     });

-    it('should return empty listing when no object has user md', done => {
+    test('should return empty listing when no object has user md', done => {
         const encodedSearch =
             encodeURIComponent('x-amz-meta-food="nosuchfood"');
         return runAndCheckSearch(s3Client, bucketName,
@@ -86,13 +88,12 @@ runIfMongo('Basic search', () => {
     });

     describe('search when overwrite object', () => {
-        before(done => {
+        beforeAll(done => {
             s3Client.putObject({ Bucket: bucketName, Key: objectKey,
                 Metadata: updatedUserMetadata }, done);
         });

-        it('should list object with searched for updated user metadata',
-            done => {
+        test('should list object with searched for updated user metadata', done => {
             const encodedSearch =
                 encodeURIComponent('x-amz-meta-food' +
                     `="${updatedUserMetadata.food}"`);
@@ -104,15 +105,15 @@ runIfMongo('Basic search', () => {

 runIfMongo('Search when no objects in bucket', () => {
     const bucketName = `noobjectbucket${Date.now()}`;
-    before(done => {
+    beforeAll(done => {
         s3Client.createBucket({ Bucket: bucketName }, done);
     });

-    after(done => {
+    afterAll(done => {
         s3Client.deleteBucket({ Bucket: bucketName }, done);
     });

-    it('should return empty listing when no objects in bucket', done => {
+    test('should return empty listing when no objects in bucket', done => {
         const encodedSearch = encodeURIComponent(`key="${objectKey}"`);
         return runAndCheckSearch(s3Client, bucketName,
             encodedSearch, null, done);
@@ -121,15 +122,15 @@ runIfMongo('Search when no objects in bucket', () => {

 runIfMongo('Invalid regular expression searches', () => {
     const bucketName = `badregex-${Date.now()}`;
-    before(done => {

|
beforeAll(done => {
|
||||||
s3Client.createBucket({ Bucket: bucketName }, done);
|
s3Client.createBucket({ Bucket: bucketName }, done);
|
||||||
});
|
});
|
||||||
|
|
||||||
after(done => {
|
afterAll(done => {
|
||||||
s3Client.deleteBucket({ Bucket: bucketName }, done);
|
s3Client.deleteBucket({ Bucket: bucketName }, done);
|
||||||
});
|
});
|
||||||
|
|
||||||
it('should return error if pattern is invalid', done => {
|
test('should return error if pattern is invalid', done => {
|
||||||
const encodedSearch = encodeURIComponent('key LIKE "/((helloworld/"');
|
const encodedSearch = encodeURIComponent('key LIKE "/((helloworld/"');
|
||||||
const testError = {
|
const testError = {
|
||||||
code: 'InvalidArgument',
|
code: 'InvalidArgument',
|
||||||
|
|
|
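Every hunk in this file applies the same mechanical mapping from Mocha to Jest: `before`/`after` become `beforeAll`/`afterAll` and `it` becomes `test`, while the `done`-callback style carries over unchanged. A minimal self-contained sketch of that mapping (the `resource` object below is illustrative, standing in for the bucket setup in the real suites):

```js
describe('lifecycle mapping', () => {
    let resource;

    // Mocha's before(done => {...}) becomes Jest's beforeAll
    beforeAll(done => {
        resource = { created: true }; // stands in for s3Client.createBucket
        done();
    });

    // Mocha's after(done => {...}) becomes Jest's afterAll
    afterAll(done => {
        resource = null; // stands in for s3Client.deleteBucket
        done();
    });

    // Mocha's it(...) becomes Jest's test(...); done works the same way
    test('uses the shared resource', done => {
        expect(resource.created).toBe(true);
        done();
    });
});
```

Note that Jest also accepts `it` as an alias for `test`, so that rename is stylistic; the hook renames, by contrast, are required.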
@@ -28,18 +28,18 @@ testUtils.runAndCheckSearch = (s3Client, bucketName, encodedSearch,
     });
     searchRequest.on('success', res => {
         if (testResult) {
-            assert(res.data.Contents[0], 'should be Contents listed');
-            assert.strictEqual(res.data.Contents[0].Key, testResult);
-            assert.strictEqual(res.data.Contents.length, 1);
+            expect(res.data.Contents[0]).toBeTruthy();
+            expect(res.data.Contents[0].Key).toBe(testResult);
+            expect(res.data.Contents.length).toBe(1);
         } else {
-            assert.strictEqual(res.data.Contents.length, 0);
+            expect(res.data.Contents.length).toBe(0);
         }
         return done();
     });
     searchRequest.on('error', err => {
         if (testResult) {
-            assert.strictEqual(err.code, testResult.code);
-            assert.strictEqual(err.message, testResult.message);
+            expect(err.code).toBe(testResult.code);
+            expect(err.message).toBe(testResult.message);
         }
         return done();
     });
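The assertion rewrites in this helper are representative of the whole changeset. The correspondence used throughout, as a sketch (each line below passes; `toBe` uses `Object.is` identity, `toEqual` compares recursively):

```js
expect('a').toBe('a');                              // was assert.strictEqual('a', 'a')
expect({ food: 'cake' }).toEqual({ food: 'cake' }); // was assert.deepEqual(...)
expect([1]).toBeTruthy();                           // was assert([1], 'message'), message dropped
expect(null).toEqual(null);                         // was assert.equal(null, null, 'message')
```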
@@ -12,7 +12,7 @@ runIfMongo('Search in version enabled bucket', () => {
         MFADelete: 'Disabled',
         Status: 'Enabled',
     };
-    before(done => {
+    beforeAll(done => {
         s3Client.createBucket({ Bucket: bucketName }, err => {
             if (err) {
                 return done(err);
@@ -28,7 +28,7 @@ runIfMongo('Search in version enabled bucket', () => {
         });
     });

-    after(done => {
+    afterAll(done => {
         removeAllVersions(s3Client, bucketName,
             err => {
                 if (err) {
@@ -38,7 +38,7 @@ runIfMongo('Search in version enabled bucket', () => {
         });
     });

-    it('should list just master object with searched for metadata', done => {
+    test('should list just master object with searched for metadata', done => {
         const encodedSearch =
             encodeURIComponent(`x-amz-meta-food="${userMetadata.food}"`);
         return runAndCheckSearch(s3Client, bucketName,
@@ -46,12 +46,12 @@ runIfMongo('Search in version enabled bucket', () => {
     });

     describe('New version overwrite', () => {
-        before(done => {
+        beforeAll(done => {
             s3Client.putObject({ Bucket: bucketName,
                 Key: masterKey, Metadata: updatedMetadata }, done);
         });

-        it('should list just master object with updated metadata', done => {
+        test('should list just master object with updated metadata', done => {
             const encodedSearch =
                 encodeURIComponent(`x-amz-meta-food="${updatedMetadata.food}"`);
             return runAndCheckSearch(s3Client, bucketName,
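`runIfMongo` gates whole suites on the metadata backend, and it keeps working after the migration because both Mocha and Jest expose `describe.skip`. A plausible implementation is sketched below; the environment variable name is an assumption for illustration, not taken from this changeset:

```js
// Hypothetical helper: run the suite only when tests target MongoDB metadata.
const runIfMongo = process.env.S3METADATA === 'mongodb'
    ? describe
    : describe.skip;

runIfMongo('MongoDB-only search suite', () => {
    test('placeholder', () => {
        expect(true).toBe(true);
    });
});
```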
@@ -58,8 +58,7 @@ testAcp.addGrantee('Group', constants.publicId, 'READ');
 function putObjectAcl(s3, key, versionId, acp, cb) {
     s3.putObjectAcl({ Bucket: bucket, Key: key, AccessControlPolicy: acp,
         VersionId: versionId }, err => {
-        assert.strictEqual(err, null, 'Expected success ' +
-            `putting object acl, got error ${err}`);
+        expect(err).toBe(null);
         cb();
     });
 }
@@ -67,8 +66,7 @@ function putObjectAcl(s3, key, versionId, acp, cb) {
 function putObjectAndAcl(s3, key, body, acp, cb) {
     s3.putObject({ Bucket: bucket, Key: key, Body: body },
         (err, putData) => {
-            assert.strictEqual(err, null, 'Expected success ' +
-                `putting object, got error ${err}`);
+            expect(err).toBe(null);
             putObjectAcl(s3, key, putData.VersionId, acp, () =>
                 cb(null, putData.VersionId));
         });
@@ -91,8 +89,7 @@ function putVersionsWithAclToAws(s3, key, data, acps, cb) {
     async.timesLimit(data.length, 1, (i, next) => {
         putObjectAndAcl(s3, key, data[i], acps[i], next);
     }, (err, results) => {
-        assert.strictEqual(err, null, 'Expected success ' +
-            `putting versions with acl, got error ${err}`);
+        expect(err).toBe(null);
         cb(null, results);
     });
 });
@@ -105,9 +102,8 @@ function getObjectAndAssertAcl(s3, params, cb) {
     () => {
         s3.getObjectAcl({ Bucket: bucket, Key: key, VersionId: versionId },
             (err, data) => {
-                assert.strictEqual(err, null, 'Expected success ' +
-                    `getting object acl, got error ${err}`);
-                assert.deepEqual(data, expectedResult);
+                expect(err).toBe(null);
+                expect(data).toEqual(expectedResult);
                 cb();
             });
     });
@@ -133,8 +129,7 @@ function getObjectsAndAssertAcls(s3, key, versionIds, expectedData,
         getObjectAndAssertAcl(s3, { bucket, key, versionId, body,
             expectedResult, expectedVersionId: versionId }, next);
     }, err => {
-        assert.strictEqual(err, null, 'Expected success ' +
-            `getting object acls, got error ${err}`);
+        expect(err).toBe(null);
         cb();
     });
 }
@@ -175,19 +170,18 @@ function testSuite() {
         });
     });

-    it('versioning not configured: should put/get acl successfully when ' +
+    test('versioning not configured: should put/get acl successfully when ' +
     'versioning not configured', done => {
         const key = `somekey-${genUniqID()}`;
         putObjectAndAcl(s3, key, someBody, testAcp, (err, versionId) => {
-            assert.strictEqual(versionId, undefined);
+            expect(versionId).toBe(undefined);
             getObjectAndAssertAcl(s3, { bucket, key, body: someBody,
                 expectedResult: testAcp }, done);
         });
     });

-    it('versioning suspended then enabled: should put/get acl on null ' +
-    'version successfully even when latest version is not null version',
-    done => {
+    test('versioning suspended then enabled: should put/get acl on null ' +
+    'version successfully even when latest version is not null version', done => {
         const key = `somekey-${genUniqID()}`;
         async.waterfall([
             next => putNullVersionsToAws(s3, bucket, key, [undefined],
@@ -201,8 +195,7 @@ function testSuite() {
         ], done);
     });

-    it('versioning enabled: should get correct acl using version IDs',
-    done => {
+    test('versioning enabled: should get correct acl using version IDs', done => {
         const key = `somekey-${genUniqID()}`;
         const acps = ['READ', 'FULL_CONTROL', 'READ_ACP', 'WRITE_ACP']
             .map(perm => {
@@ -226,7 +219,7 @@ function testSuite() {
         ], done);
     });

-    it('versioning enabled: should get correct acl when getting ' +
+    test('versioning enabled: should get correct acl when getting ' +
     'without version ID', done => {
         const key = `somekey-${genUniqID()}`;
         const acps = ['READ', 'FULL_CONTROL', 'READ_ACP', 'WRITE_ACP']
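A side effect visible in these helpers: Node's `assert` accepted a custom failure message as a trailing argument, and the Jest rewrites drop it, so `expect(err).toBe(null)` now fails with a generic diff rather than "Expected success putting object acl...". If that context matters, one option is a small helper; this is a sketch, nothing in the changeset adds it:

```js
// Hypothetical helper to keep the lost context in the failure output.
function expectNoError(err, context) {
    if (err) {
        // Throwing surfaces the message in Jest, mirroring assert's third argument.
        throw new Error(`Expected success ${context}, got error ${err}`);
    }
}

// Usage inside a callback, e.g. in putObjectAcl:
// expectNoError(err, 'putting object acl');
```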
@@ -26,7 +26,7 @@ describeSkipIfNotMultiple('Multiple backend delete', () => {
     let bucketUtil;
     let s3;

-    before(() => {
+    beforeAll(() => {
         process.stdout.write('Creating bucket\n');
         bucketUtil = new BucketUtility('default', sigCfg);
         s3 = bucketUtil.s3;
@@ -78,7 +78,7 @@ describeSkipIfNotMultiple('Multiple backend delete', () => {
             throw err;
         });
     });
-    after(() => {
+    afterAll(() => {
         process.stdout.write('Deleting bucket\n');
         return bucketUtil.deleteOne(bucket)
         .catch(err => {
@@ -87,69 +87,57 @@ describeSkipIfNotMultiple('Multiple backend delete', () => {
         });
     });

-    it('should delete object from mem', done => {
+    test('should delete object from mem', done => {
         s3.deleteObject({ Bucket: bucket, Key: memObject }, err => {
-            assert.strictEqual(err, null,
-                `Expected success, got error ${JSON.stringify(err)}`);
+            expect(err).toBe(null);
             s3.getObject({ Bucket: bucket, Key: memObject }, err => {
-                assert.strictEqual(err.code, 'NoSuchKey', 'Expected ' +
-                    'error but got success');
+                expect(err.code).toBe('NoSuchKey');
                 done();
             });
         });
     });
-    it('should delete object from file', done => {
+    test('should delete object from file', done => {
         s3.deleteObject({ Bucket: bucket, Key: fileObject }, err => {
-            assert.strictEqual(err, null,
-                `Expected success, got error ${JSON.stringify(err)}`);
+            expect(err).toBe(null);
             s3.getObject({ Bucket: bucket, Key: fileObject }, err => {
-                assert.strictEqual(err.code, 'NoSuchKey', 'Expected ' +
-                    'error but got success');
+                expect(err.code).toBe('NoSuchKey');
                 done();
             });
         });
     });
-    it('should delete object from AWS', done => {
+    test('should delete object from AWS', done => {
         s3.deleteObject({ Bucket: bucket, Key: awsObject }, err => {
-            assert.strictEqual(err, null,
-                `Expected success, got error ${JSON.stringify(err)}`);
+            expect(err).toBe(null);
             s3.getObject({ Bucket: bucket, Key: awsObject }, err => {
-                assert.strictEqual(err.code, 'NoSuchKey', 'Expected ' +
-                    'error but got success');
+                expect(err.code).toBe('NoSuchKey');
                 done();
             });
         });
     });
-    it('should delete 0-byte object from AWS', done => {
+    test('should delete 0-byte object from AWS', done => {
         s3.deleteObject({ Bucket: bucket, Key: emptyObject }, err => {
-            assert.strictEqual(err, null,
-                `Expected success, got error ${JSON.stringify(err)}`);
+            expect(err).toBe(null);
             s3.getObject({ Bucket: bucket, Key: emptyObject }, err => {
-                assert.strictEqual(err.code, 'NoSuchKey', 'Expected ' +
-                    'error but got success');
+                expect(err.code).toBe('NoSuchKey');
                 done();
             });
         });
     });
-    it('should delete large object from AWS', done => {
+    test('should delete large object from AWS', done => {
         s3.deleteObject({ Bucket: bucket, Key: bigObject }, err => {
-            assert.strictEqual(err, null,
-                `Expected success, got error ${JSON.stringify(err)}`);
+            expect(err).toBe(null);
             s3.getObject({ Bucket: bucket, Key: bigObject }, err => {
-                assert.strictEqual(err.code, 'NoSuchKey', 'Expected ' +
-                    'error but got success');
+                expect(err.code).toBe('NoSuchKey');
                 done();
            });
         });
     });
-    it('should delete object from AWS location with bucketMatch set to ' +
+    test('should delete object from AWS location with bucketMatch set to ' +
     'false', done => {
         s3.deleteObject({ Bucket: bucket, Key: mismatchObject }, err => {
-            assert.equal(err, null,
-                `Expected success, got error ${JSON.stringify(err)}`);
+            expect(err).toEqual(null);
             s3.getObject({ Bucket: bucket, Key: mismatchObject }, err => {
-                assert.strictEqual(err.code, 'NoSuchKey',
-                    'Expected error but got success');
+                expect(err.code).toBe('NoSuchKey');
                 done();
             });
         });
     });
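All of these tests keep the nested-callback, `done`-style shape, which Jest supports directly. One caveat worth flagging (not addressed by this changeset): when an `expect` inside a plain SDK callback throws, nothing catches it, so the test can surface as a timeout instead of a clean assertion failure. A defensive sketch, assuming the same `s3` client and key names as the suite above:

```js
test('should delete object from mem (defensive variant)', done => {
    s3.deleteObject({ Bucket: bucket, Key: memObject }, err => {
        try {
            expect(err).toBe(null);
            done();
        } catch (assertionError) {
            done(assertionError); // report the failure instead of timing out
        }
    });
});
```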
@@ -47,17 +47,16 @@ function _assertDeleteResult(result, resultType, requestVersionId) {
     const [expectVersionId, matchReqVersionId, expectDeleteMarker] =
         _deleteResultSchema[resultType];
     if (expectVersionId && matchReqVersionId) {
-        assert.strictEqual(result.VersionId, requestVersionId);
+        expect(result.VersionId).toBe(requestVersionId);
     } else if (expectVersionId) {
-        assert(result.VersionId, 'expected version id in result');
+        expect(result.VersionId).toBeTruthy();
     } else {
-        assert.strictEqual(result.VersionId, undefined,
-            `did not expect version id in result, got "${result.VersionId}"`);
+        expect(result.VersionId).toBe(undefined);
     }
     if (expectDeleteMarker) {
-        assert.strictEqual(result.DeleteMarker, 'true');
+        expect(result.DeleteMarker).toBe('true');
     } else {
-        assert.strictEqual(result.DeleteMarker, undefined);
+        expect(result.DeleteMarker).toBe(undefined);
     }
 }

@@ -66,13 +65,12 @@ function delAndAssertResult(s3, params, cb) {
     return s3.deleteObject({ Bucket: bucket, Key: key, VersionId:
         versionId }, (err, result) => {
         if (resultError) {
-            assert(err, `expected ${resultError} but found no error`);
-            assert.strictEqual(err.code, resultError);
-            assert.strictEqual(err.statusCode, errors[resultError].code);
+            expect(err).toBeTruthy();
+            expect(err.code).toBe(resultError);
+            expect(err.statusCode).toBe(errors[resultError].code);
             return cb(null);
         }
-        assert.strictEqual(err, null, 'Expected success ' +
-            `deleting object, got error ${err}`);
+        expect(err).toBe(null);
         _assertDeleteResult(result, resultType, versionId);
         return cb(null, result.VersionId);
     });
@@ -96,8 +94,8 @@ function _getAssertDeleted(s3, params, cb) {
     const { key, versionId, errorCode } = params;
     return s3.getObject({ Bucket: bucket, Key: key, VersionId: versionId },
         err => {
-            assert.strictEqual(err.code, errorCode);
-            assert.strictEqual(err.statusCode, 404);
+            expect(err.code).toBe(errorCode);
+            expect(err.statusCode).toBe(404);
             return cb();
         });
 }
@@ -105,8 +103,8 @@ function _getAssertDeleted(s3, params, cb) {
 function _awsGetAssertDeleted(params, cb) {
     const { key, versionId, errorCode } = params;
     return getAwsRetry({ key, versionId }, 0, err => {
-        assert.strictEqual(err.code, errorCode);
-        assert.strictEqual(err.statusCode, 404);
+        expect(err.code).toBe(errorCode);
+        expect(err.statusCode).toBe(404);
         return cb();
     });
 }
@@ -142,7 +140,7 @@ describeSkipIfNotMultiple('AWS backend delete object w. versioning: ' +
         });
     });

-    it('versioning not configured: if specifying "null" version, should ' +
+    test('versioning not configured: if specifying "null" version, should ' +
     'delete specific version in AWS backend', done => {
         const key = `somekey-${genUniqID()}`;
         async.waterfall([
@@ -160,7 +158,7 @@ describeSkipIfNotMultiple('AWS backend delete object w. versioning: ' +
         ], done);
     });

-    it('versioning not configured: specifying any version id other ' +
+    test('versioning not configured: specifying any version id other ' +
     'than null should not result in its deletion in AWS backend', done => {
         const key = `somekey-${genUniqID()}`;
         async.waterfall([
@@ -172,13 +170,13 @@ describeSkipIfNotMultiple('AWS backend delete object w. versioning: ' +
                 'InvalidArgument' }, err => next(err, awsVerId)),
             (awsVerId, next) => awsGetLatestVerId(key, someBody,
                 (err, resultVid) => {
-                    assert.strictEqual(resultVid, awsVerId);
+                    expect(resultVid).toBe(awsVerId);
                     next();
                 }),
         ], done);
     });

-    it('versioning suspended: should delete a specific version in AWS ' +
+    test('versioning suspended: should delete a specific version in AWS ' +
     'backend successfully', done => {
         const key = `somekey-${genUniqID()}`;
         async.waterfall([
@@ -196,7 +194,7 @@ describeSkipIfNotMultiple('AWS backend delete object w. versioning: ' +
         ], done);
     });

-    it('versioning enabled: should delete a specific version in AWS ' +
+    test('versioning enabled: should delete a specific version in AWS ' +
     'backend successfully', done => {
         const key = `somekey-${genUniqID()}`;
         async.waterfall([
@@ -215,7 +213,7 @@ describeSkipIfNotMultiple('AWS backend delete object w. versioning: ' +
         ], done);
     });

-    it('versioning not configured: deleting existing object should ' +
+    test('versioning not configured: deleting existing object should ' +
     'not return version id or x-amz-delete-marker: true but should ' +
     'create a delete marker in aws ', done => {
         const key = `somekey-${genUniqID()}`;
@@ -231,7 +229,7 @@ describeSkipIfNotMultiple('AWS backend delete object w. versioning: ' +
         ], done);
     });

-    it('versioning suspended: should create a delete marker in s3 ' +
+    test('versioning suspended: should create a delete marker in s3 ' +
     'and aws successfully when deleting existing object', done => {
         const key = `somekey-${genUniqID()}`;
         async.waterfall([
@@ -249,10 +247,9 @@ describeSkipIfNotMultiple('AWS backend delete object w. versioning: ' +
     // NOTE: Normal deletes when versioning is suspended create a
     // delete marker with the version id "null", which overwrites an
     // existing null version in s3 metadata.
-    it('versioning suspended: creating a delete marker will overwrite an ' +
+    test('versioning suspended: creating a delete marker will overwrite an ' +
     'existing null version that is the latest version in s3 metadata,' +
-    ' but the data of the first null version will remain in AWS',
-    function itF(done) {
+    ' but the data of the first null version will remain in AWS', done => {
         const key = `somekey-${genUniqID()}`;
         async.waterfall([
             next => putNullVersionsToAws(s3, bucket, key, [someBody],
@@ -275,7 +272,7 @@ describeSkipIfNotMultiple('AWS backend delete object w. versioning: ' +
             // get directly to aws however will give us first null version
             next => awsGetLatestVerId(key, someBody, next),
             (awsLatestVid, next) => {
-                assert.strictEqual(awsLatestVid, this.test.awsNullVid);
+                expect(awsLatestVid).toBe(this.test.awsNullVid);
                 next();
             },
         ], done);
@@ -284,10 +281,9 @@ describeSkipIfNotMultiple('AWS backend delete object w. versioning: ' +
     // NOTE: Normal deletes when versioning is suspended create a
     // delete marker with the version id "null" which is supposed to
     // overwrite any existing null version.
-    it('versioning suspended: creating a delete marker will overwrite an ' +
+    test('versioning suspended: creating a delete marker will overwrite an ' +
     'existing null version that is not the latest version in s3 metadata,' +
-    ' but the data of the first null version will remain in AWS',
-    function itF(done) {
+    ' but the data of the first null version will remain in AWS', done => {
         const key = `somekey-${genUniqID()}`;
         const data = [undefined, 'data1'];
         async.waterfall([
@@ -329,13 +325,13 @@ describeSkipIfNotMultiple('AWS backend delete object w. versioning: ' +
             // when getting the latest version in AWS now
             next => awsGetLatestVerId(key, '', next),
             (awsLatestVid, next) => {
-                assert.strictEqual(awsLatestVid, this.test.awsNullVid);
+                expect(awsLatestVid).toBe(this.test.awsNullVid);
                 next();
             },
         ], done);
     });

-    it('versioning enabled: should create a delete marker in s3 and ' +
+    test('versioning enabled: should create a delete marker in s3 and ' +
     'aws successfully when deleting existing object', done => {
         const key = `somekey-${genUniqID()}`;
         async.waterfall([
@@ -350,7 +346,7 @@ describeSkipIfNotMultiple('AWS backend delete object w. versioning: ' +
         ], done);
     });

-    it('versioning enabled: should delete a delete marker in s3 and ' +
+    test('versioning enabled: should delete a delete marker in s3 and ' +
     'aws successfully', done => {
         const key = `somekey-${genUniqID()}`;
         async.waterfall([
@@ -372,7 +368,7 @@ describeSkipIfNotMultiple('AWS backend delete object w. versioning: ' +
         ], done);
     });

-    it('multiple delete markers: should be able to get pre-existing ' +
+    test('multiple delete markers: should be able to get pre-existing ' +
     'versions after creating and deleting several delete markers', done => {
         const key = `somekey-${genUniqID()}`;
         async.waterfall([
@@ -390,7 +386,7 @@ describeSkipIfNotMultiple('AWS backend delete object w. versioning: ' +
         ], done);
     });

-    it('multiple delete markers: should get NoSuchObject if only ' +
+    test('multiple delete markers: should get NoSuchObject if only ' +
     'one of the delete markers is deleted', done => {
         const key = `somekey-${genUniqID()}`;
         async.waterfall([
@@ -408,7 +404,7 @@ describeSkipIfNotMultiple('AWS backend delete object w. versioning: ' +
         ], done);
     });

-    it('should get the new latest version after deleting the latest' +
+    test('should get the new latest version after deleting the latest' +
     'specific version', done => {
         const key = `somekey-${genUniqID()}`;
         const data = [...Array(4).keys()].map(i => i.toString());
@@ -441,7 +437,7 @@ describeSkipIfNotMultiple('AWS backend delete object w. versioning: ' +
         ], done);
     });

-    it('should delete the correct version even if other versions or ' +
+    test('should delete the correct version even if other versions or ' +
     'delete markers put directly on aws', done => {
         const key = `somekey-${genUniqID()}`;
         async.waterfall([
@@ -469,7 +465,7 @@ describeSkipIfNotMultiple('AWS backend delete object w. versioning: ' +
         ], done);
     });

-    it('should not return an error deleting a version that was already ' +
+    test('should not return an error deleting a version that was already ' +
     'deleted directly from AWS backend', done => {
         const key = `somekey-${genUniqID()}`;
         async.waterfall([
@@ -528,7 +524,7 @@ describeSkipIfNotMultiple('AWS backend delete object w. versioning: ' +
         });
     });

-    it('versioning not configured: deleting non-existing object should ' +
+    test('versioning not configured: deleting non-existing object should ' +
     'not return version id or x-amz-delete-marker: true nor create a ' +
     'delete marker in aws ', done => {
         const key = `somekey-${genUniqID()}`;
@@ -542,7 +538,7 @@ describeSkipIfNotMultiple('AWS backend delete object w. versioning: ' +
         ], done);
     });

-    it('versioning suspended: should create a delete marker in s3 ' +
+    test('versioning suspended: should create a delete marker in s3 ' +
     'and aws successfully when deleting non-existing object', done => {
         const key = `somekey-${genUniqID()}`;
         async.waterfall([
@@ -556,7 +552,7 @@ describeSkipIfNotMultiple('AWS backend delete object w. versioning: ' +
         ], done);
     });

-    it('versioning enabled: should create a delete marker in s3 and ' +
+    test('versioning enabled: should create a delete marker in s3 and ' +
     'aws successfully when deleting non-existing object', done => {
         const key = `somekey-${genUniqID()}`;
         async.waterfall([
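Note that the hunks at `-275` and `-329` convert the surrounding tests to arrow functions but still read `this.test.awsNullVid`. Mocha populated `this.test` on the test context; in a Jest arrow callback `this` is just the enclosing module scope, so these reads will come back undefined at runtime. A sketch of the usual fix, carrying the value in a local variable instead (the helper names are the suite's own, reused here for illustration; the middle steps are elided):

```js
test('delete marker leaves first null version readable in AWS', done => {
    const key = `somekey-${genUniqID()}`;
    let awsNullVid; // replaces this.test.awsNullVid
    async.waterfall([
        next => awsGetLatestVerId(key, someBody, (err, nullVid) => {
            awsNullVid = nullVid; // remember the first null version id
            next(err);
        }),
        // ... create and then delete the delete marker here ...
        next => awsGetLatestVerId(key, someBody, (err, awsLatestVid) => {
            expect(awsLatestVid).toBe(awsNullVid);
            next(err);
        }),
    ], done);
});
```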
@@ -32,7 +32,7 @@ function testSuite() {
     let bucketUtil;
     let s3;

-    before(() => {
+    beforeAll(() => {
         process.stdout.write('Creating bucket');
         bucketUtil = new BucketUtility('default', sigCfg);
         s3 = bucketUtil.s3;
@@ -43,7 +43,7 @@ function testSuite() {
         });
     });

-    after(() => {
+    afterAll(() => {
         process.stdout.write('Emptying bucket\n');
         return bucketUtil.empty(azureContainerName)
         .then(() => {
@@ -59,7 +59,7 @@ function testSuite() {
     keys.forEach(key => {
         const keyName = uniqName(keyObject);
         describe(`${key.describe} size`, () => {
-            before(done => {
+            beforeAll(done => {
                 s3.putObject({
                     Bucket: azureContainerName,
                     Key: keyName,
@@ -70,19 +70,17 @@ function testSuite() {
                 }, done);
             });

-            it(`should delete an ${key.describe} object from Azure`,
-            done => {
+            test(`should delete an ${key.describe} object from Azure`, done => {
                 s3.deleteObject({
                     Bucket: azureContainerName,
                     Key: keyName,
                 }, err => {
-                    assert.equal(err, null, 'Expected success ' +
-                        `but got error ${err}`);
+                    expect(err).toEqual(null);
                     setTimeout(() =>
                         azureClient.getBlobProperties(azureContainerName,
                             keyName, err => {
-                                assert.strictEqual(err.statusCode, 404);
-                                assert.strictEqual(err.code, 'NotFound');
+                                expect(err.statusCode).toBe(404);
+                                expect(err.code).toBe('NotFound');
                                 return done();
                             }), azureTimeout);
                 });
@@ -92,11 +90,17 @@ function testSuite() {

     describe('delete from Azure location with bucketMatch set to false',
     () => {
-        beforeEach(function beforeF(done) {
-            this.currentTest.azureObject = uniqName(keyObject);
+        let testContext;
+
+        beforeEach(() => {
+            testContext = {};
+        });
+
+        beforeEach(done => {
+            testContext.currentTest.azureObject = uniqName(keyObject);
             s3.putObject({
                 Bucket: azureContainerName,
-                Key: this.currentTest.azureObject,
+                Key: testContext.currentTest.azureObject,
                 Body: normalBody,
                 Metadata: {
                     'scal-location-constraint': azureLocationMismatch,
@@ -104,19 +108,18 @@ function testSuite() {
             }, done);
         });

-        it('should delete object', function itF(done) {
+        test('should delete object', done => {
             s3.deleteObject({
                 Bucket: azureContainerName,
-                Key: this.test.azureObject,
+                Key: testContext.test.azureObject,
             }, err => {
-                assert.equal(err, null, 'Expected success ' +
-                    `but got error ${err}`);
+                expect(err).toEqual(null);
                 setTimeout(() =>
                     azureClient.getBlobProperties(azureContainerName,
-                        `${azureContainerName}/${this.test.azureObject}`,
+                        `${azureContainerName}/${testContext.test.azureObject}`,
                         err => {
-                            assert.strictEqual(err.statusCode, 404);
-                            assert.strictEqual(err.code, 'NotFound');
+                            expect(err.statusCode).toBe(404);
+                            expect(err.code).toBe('NotFound');
                             return done();
                         }), azureTimeout);
             });
@@ -124,46 +127,55 @@ function testSuite() {
         });

     describe('returning no error', () => {
-        beforeEach(function beF(done) {
-            this.currentTest.azureObject = uniqName(keyObject);
+        let testContext;
+
+        beforeEach(() => {
+            testContext = {};
+        });
+
+        beforeEach(done => {
+            testContext.currentTest.azureObject = uniqName(keyObject);
             s3.putObject({
                 Bucket: azureContainerName,
-                Key: this.currentTest.azureObject,
+                Key: testContext.currentTest.azureObject,
                 Body: normalBody,
                 Metadata: {
                     'scal-location-constraint': azureLocation,
                 },
             }, err => {
-                assert.equal(err, null, 'Expected success but got ' +
-                    `error ${err}`);
+                expect(err).toEqual(null);
                 azureClient.deleteBlob(azureContainerName,
-                    this.currentTest.azureObject, err => {
-                        assert.equal(err, null, 'Expected success but got ' +
-                            `error ${err}`);
+                    testContext.currentTest.azureObject, err => {
+                        expect(err).toEqual(null);
                         done(err);
                     });
             });
         });

-        it('should return no error on deleting an object deleted ' +
-        'from Azure', function itF(done) {
+        test('should return no error on deleting an object deleted ' +
+        'from Azure', done => {
             s3.deleteObject({
                 Bucket: azureContainerName,
-                Key: this.test.azureObject,
+                Key: testContext.test.azureObject,
             }, err => {
-                assert.equal(err, null, 'Expected success but got ' +
-                    `error ${err}`);
+                expect(err).toEqual(null);
                 done();
             });
         });
     });

     describe('Versioning:: ', () => {
-        beforeEach(function beF(done) {
-            this.currentTest.azureObject = uniqName(keyObject);
+        let testContext;
+
+        beforeEach(() => {
+            testContext = {};
+        });
+
+        beforeEach(done => {
+            testContext.currentTest.azureObject = uniqName(keyObject);
             s3.putObject({
                 Bucket: azureContainerName,
-                Key: this.currentTest.azureObject,
+                Key: testContext.currentTest.azureObject,
                 Body: normalBody,
                 Metadata: {
                     'scal-location-constraint': azureLocation,
@@ -171,27 +183,25 @@ function testSuite() {
             }, done);
         });

-        it('should not delete object when deleting a non-existing ' +
-        'version from Azure', function itF(done) {
+        test('should not delete object when deleting a non-existing ' +
+        'version from Azure', done => {
             async.waterfall([
                 next => s3.deleteObject({
                     Bucket: azureContainerName,
-                    Key: this.test.azureObject,
+                    Key: testContext.test.azureObject,
                     VersionId: nonExistingId,
                 }, err => next(err)),
                 next => s3.getObject({
                     Bucket: azureContainerName,
-                    Key: this.test.azureObject,
+                    Key: testContext.test.azureObject,
                 }, (err, res) => {
-                    assert.equal(err, null, 'getObject: Expected success ' +
-                        `but got error ${err}`);
+                    expect(err).toEqual(null);
                     assert.deepStrictEqual(res.Body, normalBody);
                     return next(err);
                 }),
                 next => azureClient.getBlobToText(azureContainerName,
-                    this.test.azureObject, (err, res) => {
-                        assert.equal(err, null, 'getBlobToText: Expected ' +
-                            `successbut got error ${err}`);
+                    testContext.test.azureObject, (err, res) => {
+                        expect(err).toEqual(null);
                         assert.deepStrictEqual(Buffer.from(res, 'utf8'),
                             normalBody);
                         return next();
@@ -201,48 +211,52 @@ function testSuite() {
         });

         describe('with ongoing MPU: ', () => {
-            beforeEach(function beF(done) {
-                this.currentTest.key = uniqName(keyObject);
+            let testContext;
+
+            beforeEach(() => {
+                testContext = {};
+            });
+
+            beforeEach(done => {
+                testContext.currentTest.key = uniqName(keyObject);
                 const params = {
                     Bucket: azureContainerName,
-                    Key: this.currentTest.key,
+                    Key: testContext.currentTest.key,
                     Body: normalBody,
                     Metadata: { 'scal-location-constraint': azureLocation },
                 };
                 s3.putObject(params, err => {
-                    assert.equal(err, null, 'Err putting object to Azure: ' +
-                        `${err}`);
+                    expect(err).toEqual(null);
                     const params = {
                         Bucket: azureContainerName,
-                        Key: this.currentTest.key,
+                        Key: testContext.currentTest.key,
                         Metadata: { 'scal-location-constraint': azureLocation },
                     };
                     s3.createMultipartUpload(params, (err, res) => {
-                        assert.equal(err, null, 'Err initiating MPU on ' +
-                            `Azure: ${err}`);
-                        this.currentTest.uploadId = res.UploadId;
+                        expect(err).toEqual(null);
+                        testContext.currentTest.uploadId = res.UploadId;
                         setTimeout(() => done(), azureTimeout);
                     });
                 });
             });

-            afterEach(function afF(done) {
+            afterEach(done => {
                 s3.abortMultipartUpload({
                     Bucket: azureContainerName,
-                    Key: this.currentTest.key,
-                    UploadId: this.currentTest.uploadId,
+                    Key: testContext.currentTest.key,
+                    UploadId: testContext.currentTest.uploadId,
                 }, err => {
-                    assert.equal(err, null, `Err aborting MPU: ${err}`);
+                    expect(err).toEqual(null);
                     setTimeout(() => done(), azureTimeout);
                 });
             });

-            it('should return InternalError', function itFn(done) {
+            test('should return InternalError', done => {
                 s3.deleteObject({
                     Bucket: azureContainerName,
-                    Key: this.test.key,
+                    Key: testContext.test.key,
                 }, err => {
-                    assert.strictEqual(err.code, 'MPUinProgress');
+                    expect(err.code).toBe('MPUinProgress');
                     done();
                 });
             });
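The recurring rewrite in this file replaces Mocha's implicit test context (`this.currentTest` in hooks, `this.test` in tests) with an explicit `testContext` object reset in a `beforeEach`. Two things to watch, neither fixed by this changeset: `testContext = {}` leaves `testContext.currentTest` undefined, so the next assignment to `testContext.currentTest.azureObject` needs `currentTest` to be created first; and the hooks write under `currentTest` while the tests read under `test`, two keys that Jest never aliases the way Mocha did. A minimal working sketch using a single shared key (helpers like `uniqName` and the suite's `s3` client are reused from the file for illustration):

```js
describe('delete from Azure location', () => {
    let testContext;

    beforeEach(() => {
        testContext = {}; // fresh context per test, like Mocha's `this`
    });

    beforeEach(done => {
        testContext.azureObject = uniqName(keyObject); // one key for hooks and tests
        s3.putObject({
            Bucket: azureContainerName,
            Key: testContext.azureObject,
            Body: normalBody,
        }, done);
    });

    test('should delete object', done => {
        s3.deleteObject({
            Bucket: azureContainerName,
            Key: testContext.azureObject, // same key the hook populated
        }, err => {
            expect(err).toEqual(null);
            done();
        });
    });
});
```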
@@ -24,7 +24,7 @@ function testSuite() {
     let bucketUtil;
     let s3;

-    before(() => {
+    beforeAll(() => {
         process.stdout.write('Creating bucket\n');
         bucketUtil = new BucketUtility('default', sigCfg);
         s3 = bucketUtil.s3;
@@ -63,7 +63,7 @@ function testSuite() {
             throw err;
         });
     });
-    after(() => {
+    afterAll(() => {
         process.stdout.write('Deleting bucket\n');
         return bucketUtil.deleteOne(bucket)
         .catch(err => {
@@ -93,22 +93,21 @@ function testSuite() {
     ];
     deleteTests.forEach(test => {
         const { msg, Bucket, Key } = test;
-        it(msg, done => s3.deleteObject({ Bucket, Key }, err => {
-            assert.strictEqual(err, null,
-                `Expected success, got error ${JSON.stringify(err)}`);
+        test(msg, done => s3.deleteObject({ Bucket, Key }, err => {
+            expect(err).toBe(null);
             s3.getObject({ Bucket, Key }, err => {
-                assert.strictEqual(err.code, 'NoSuchKey', 'Expected ' +
-                    'error but got success');
+                expect(err.code).toBe('NoSuchKey');
                 done();
             });
         }));
     });

-    it('should return success if the object does not exist',
+    test(
+        'should return success if the object does not exist',
         done => s3.deleteObject({ Bucket: bucket, Key: 'noop' }, err => {
-            assert.strictEqual(err, null,
-                `Expected success, got error ${JSON.stringify(err)}`);
+            expect(err).toBe(null);
             done();
-        }));
+        })
+    );
 });
 });
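One wrinkle in the table-driven hunk above: the loop parameter in `deleteTests.forEach(test => {...})` now shadows Jest's global `test`, so the inner `test(msg, ...)` call resolves to the array element rather than the test registrar. Renaming the parameter is enough; a sketch of the adjusted loop:

```js
deleteTests.forEach(testCase => { // renamed so Jest's global `test` stays visible
    const { msg, Bucket, Key } = testCase;
    test(msg, done => s3.deleteObject({ Bucket, Key }, err => {
        expect(err).toBe(null);
        s3.getObject({ Bucket, Key }, err => {
            expect(err.code).toBe('NoSuchKey');
            done();
        });
    }));
});
```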
@@ -26,12 +26,18 @@ const correctMD5 = 'be747eb4b75517bf6b3cf7c5fbb62f3a';
 const emptyMD5 = 'd41d8cd98f00b204e9800998ecf8427e';
 const bigMD5 = 'f1c9645dbc14efddc7d8a322685f26eb';

-describe('Multiple backend get object', function testSuite() {
+describe('Multiple backend get object', () => {
+    let testContext;
+
+    beforeEach(() => {
+        testContext = {};
+    });
+
     this.timeout(30000);
     withV4(sigCfg => {
         let bucketUtil;
         let s3;
-        before(() => {
+        beforeAll(() => {
             process.stdout.write('Creating bucket');
             bucketUtil = new BucketUtility('default', sigCfg);
             s3 = bucketUtil.s3;
@@ -42,7 +48,7 @@ describe('Multiple backend get object', () => {
             });
         });

-        after(() => {
+        afterAll(() => {
             process.stdout.write('Emptying bucket\n');
             return bucketUtil.empty(bucket)
             .then(() => {
@@ -56,47 +62,46 @@ describe('Multiple backend get object', () => {
             });
         });

-        it('should return an error to get request without a valid bucket name',
+        test(
+            'should return an error to get request without a valid bucket name',
             done => {
                 s3.getObject({ Bucket: '', Key: 'somekey' }, err => {
-                    assert.notEqual(err, null,
-                        'Expected failure but got success');
-                    assert.strictEqual(err.code, 'MethodNotAllowed');
+                    expect(err).not.toEqual(null);
+                    expect(err.code).toBe('MethodNotAllowed');
                     done();
                 });
-        });
-        it('should return NoSuchKey error when no such object',
-            done => {
+            }
+        );
+        test('should return NoSuchKey error when no such object', done => {
             s3.getObject({ Bucket: bucket, Key: 'nope' }, err => {
-                assert.notEqual(err, null,
-                    'Expected failure but got success');
-                assert.strictEqual(err.code, 'NoSuchKey');
+                expect(err).not.toEqual(null);
+                expect(err.code).toBe('NoSuchKey');
                 done();
             });
         });

         describeSkipIfNotMultiple('Complete MPU then get object on AWS ' +
         'location with bucketMatch: true ', () => {
-            beforeEach(function beforeEachFn(done) {
-                this.currentTest.key = `somekey-${genUniqID()}`;
+            beforeEach(done => {
+                testContext.currentTest.key = `somekey-${genUniqID()}`;
                 bucketUtil = new BucketUtility('default', sigCfg);
                 s3 = bucketUtil.s3;

                 async.waterfall([
                     next => s3.createMultipartUpload({
-                        Bucket: bucket, Key: this.currentTest.key,
+                        Bucket: bucket, Key: testContext.currentTest.key,
                         Metadata: { 'scal-location-constraint': awsLocation,
                         } }, (err, res) => next(err, res.UploadId)),
                     (uploadId, next) => s3.uploadPart({
                         Bucket: bucket,
-                        Key: this.currentTest.key,
+                        Key: testContext.currentTest.key,
                         PartNumber: 1,
                         UploadId: uploadId,
                         Body: 'helloworld' }, (err, res) => next(err, uploadId,
                         res.ETag)),
                     (uploadId, eTag, next) => s3.completeMultipartUpload({
                         Bucket: bucket,
-                        Key: this.currentTest.key,
+                        Key: testContext.currentTest.key,
                         MultipartUpload: {
                             Parts: [
                                 {
@@ -109,16 +114,15 @@ describe('Multiple backend get object', () => {
                     }, err => next(err)),
                 ], done);
             });
-            it('should get object from MPU on AWS ' +
-            'location with bucketMatch: true ', function it(done) {
+            test('should get object from MPU on AWS ' +
+            'location with bucketMatch: true ', done => {
                 s3.getObject({
                     Bucket: bucket,
-                    Key: this.test.key,
+                    Key: testContext.test.key,
                 }, (err, res) => {
-                    assert.equal(err, null, 'Expected success but got ' +
-                        `error ${err}`);
-                    assert.strictEqual(res.ContentLength, '10');
-                    assert.strictEqual(res.Body.toString(), 'helloworld');
+                    expect(err).toEqual(null);
+                    expect(res.ContentLength).toBe('10');
+                    expect(res.Body.toString()).toBe('helloworld');
                     assert.deepStrictEqual(res.Metadata,
                         { 'scal-location-constraint': awsLocation });
                     return done(err);
@@ -128,27 +132,27 @@ describe('Multiple backend get object', () => {

         describeSkipIfNotMultiple('Complete MPU then get object on AWS ' +
         'location with bucketMatch: false ', () => {
-            beforeEach(function beforeEachFn(done) {
-                this.currentTest.key = `somekey-${genUniqID()}`;
+            beforeEach(done => {
+                testContext.currentTest.key = `somekey-${genUniqID()}`;
                 bucketUtil = new BucketUtility('default', sigCfg);
                 s3 = bucketUtil.s3;

                 async.waterfall([
                     next => s3.createMultipartUpload({
-                        Bucket: bucket, Key: this.currentTest.key,
+                        Bucket: bucket, Key: testContext.currentTest.key,
                         Metadata: { 'scal-location-constraint':
                             awsLocationMismatch,
                         } }, (err, res) => next(err, res.UploadId)),
                     (uploadId, next) => s3.uploadPart({
                         Bucket: bucket,
-                        Key: this.currentTest.key,
+                        Key: testContext.currentTest.key,
                         PartNumber: 1,
                         UploadId: uploadId,
                         Body: 'helloworld' }, (err, res) => next(err, uploadId,
                         res.ETag)),
                     (uploadId, eTag, next) => s3.completeMultipartUpload({
                         Bucket: bucket,
-                        Key: this.currentTest.key,
+                        Key: testContext.currentTest.key,
                         MultipartUpload: {
                             Parts: [
                                 {
@@ -161,16 +165,15 @@ describe('Multiple backend get object', () => {
                     }, err => next(err)),
                 ], done);
             });
-            it('should get object from MPU on AWS ' +
-            'location with bucketMatch: false ', function it(done) {
+            test('should get object from MPU on AWS ' +
+            'location with bucketMatch: false ', done => {
                 s3.getObject({
                     Bucket: bucket,
-                    Key: this.test.key,
+                    Key: testContext.test.key,
                 }, (err, res) => {
-                    assert.equal(err, null, 'Expected success but got ' +
-                        `error ${err}`);
-                    assert.strictEqual(res.ContentLength, '10');
-                    assert.strictEqual(res.Body.toString(), 'helloworld');
+                    expect(err).toEqual(null);
+                    expect(res.ContentLength).toBe('10');
+                    expect(res.Body.toString()).toBe('helloworld');
                     assert.deepStrictEqual(res.Metadata,
                         { 'scal-location-constraint': awsLocationMismatch });
                     return done(err);
@@ -180,7 +183,7 @@ describe('Multiple backend get object', () => {

         describeSkipIfNotMultiple('with objects in all available backends ' +
         '(mem/file/AWS)', () => {
-            before(() => {
+            beforeAll(() => {
                 process.stdout.write('Putting object to mem\n');
                 return s3.putObjectAsync({ Bucket: bucket, Key: memObject,
                     Body: body,
@@ -226,69 +229,61 @@ describe('Multiple backend get object', () => {
                     throw err;
                 });
             });
-            it('should get an object from mem', done => {
+            test('should get an object from mem', done => {
                 s3.getObject({ Bucket: bucket, Key: memObject }, (err, res) => {
-                    assert.equal(err, null, 'Expected success but got ' +
-                        `error ${err}`);
-                    assert.strictEqual(res.ETag, `"${correctMD5}"`);
+                    expect(err).toEqual(null);
+                    expect(res.ETag).toBe(`"${correctMD5}"`);
                     done();
                 });
             });
-            it('should get a 0-byte object from mem', done => {
+            test('should get a 0-byte object from mem', done => {
                 s3.getObject({ Bucket: bucket, Key: emptyObject },
                     (err, res) => {
-                        assert.equal(err, null, 'Expected success but got ' +
+                        expect(err).toEqual(null);
|
||||||
`error ${err}`);
|
expect(res.ETag).toBe(`"${emptyMD5}"`);
|
||||||
assert.strictEqual(res.ETag, `"${emptyMD5}"`);
|
|
||||||
done();
|
done();
|
||||||
});
|
});
|
||||||
});
|
});
|
||||||
it('should get a 0-byte object from AWS', done => {
|
test('should get a 0-byte object from AWS', done => {
|
||||||
s3.getObject({ Bucket: bucket, Key: emptyAwsObject },
|
s3.getObject({ Bucket: bucket, Key: emptyAwsObject },
|
||||||
(err, res) => {
|
(err, res) => {
|
||||||
assert.equal(err, null, 'Expected success but got error ' +
|
expect(err).toEqual(null);
|
||||||
`error ${err}`);
|
expect(res.ETag).toBe(`"${emptyMD5}"`);
|
||||||
assert.strictEqual(res.ETag, `"${emptyMD5}"`);
|
|
||||||
done();
|
done();
|
||||||
});
|
});
|
||||||
});
|
});
|
||||||
it('should get an object from file', done => {
|
test('should get an object from file', done => {
|
||||||
s3.getObject({ Bucket: bucket, Key: fileObject },
|
s3.getObject({ Bucket: bucket, Key: fileObject },
|
||||||
(err, res) => {
|
(err, res) => {
|
||||||
assert.equal(err, null, 'Expected success but got ' +
|
expect(err).toEqual(null);
|
||||||
`error ${err}`);
|
expect(res.ETag).toBe(`"${correctMD5}"`);
|
||||||
assert.strictEqual(res.ETag, `"${correctMD5}"`);
|
|
||||||
done();
|
done();
|
||||||
});
|
});
|
||||||
});
|
});
|
||||||
it('should get an object from AWS', done => {
|
test('should get an object from AWS', done => {
|
||||||
s3.getObject({ Bucket: bucket, Key: awsObject },
|
s3.getObject({ Bucket: bucket, Key: awsObject },
|
||||||
(err, res) => {
|
(err, res) => {
|
||||||
assert.equal(err, null, 'Expected success but got ' +
|
expect(err).toEqual(null);
|
||||||
`error ${err}`);
|
expect(res.ETag).toBe(`"${correctMD5}"`);
|
||||||
assert.strictEqual(res.ETag, `"${correctMD5}"`);
|
|
||||||
done();
|
done();
|
||||||
});
|
});
|
||||||
});
|
});
|
||||||
it('should get a large object from AWS', done => {
|
test('should get a large object from AWS', done => {
|
||||||
s3.getObject({ Bucket: bucket, Key: bigObject },
|
s3.getObject({ Bucket: bucket, Key: bigObject },
|
||||||
(err, res) => {
|
(err, res) => {
|
||||||
assert.equal(err, null, 'Expected success but got ' +
|
expect(err).toEqual(null);
|
||||||
`error ${err}`);
|
expect(res.ETag).toBe(`"${bigMD5}"`);
|
||||||
assert.strictEqual(res.ETag, `"${bigMD5}"`);
|
|
||||||
done();
|
done();
|
||||||
});
|
});
|
||||||
});
|
});
|
||||||
it('should get an object using range query from AWS', done => {
|
test('should get an object using range query from AWS', done => {
|
||||||
s3.getObject({ Bucket: bucket, Key: bigObject,
|
s3.getObject({ Bucket: bucket, Key: bigObject,
|
||||||
Range: 'bytes=0-9' },
|
Range: 'bytes=0-9' },
|
||||||
(err, res) => {
|
(err, res) => {
|
||||||
assert.equal(err, null, 'Expected success but got ' +
|
expect(err).toEqual(null);
|
||||||
`error ${err}`);
|
expect(res.ContentLength).toBe('10');
|
||||||
assert.strictEqual(res.ContentLength, '10');
|
expect(res.ContentRange).toBe(`bytes 0-9/${bigBodyLen}`);
|
||||||
assert.strictEqual(res.ContentRange,
|
expect(res.ETag).toBe(`"${bigMD5}"`);
|
||||||
`bytes 0-9/${bigBodyLen}`);
|
|
||||||
assert.strictEqual(res.ETag, `"${bigMD5}"`);
|
|
||||||
done();
|
done();
|
||||||
});
|
});
|
||||||
});
|
});
|
||||||
|
@ -299,16 +294,16 @@ describe('Multiple backend get object', function testSuite() {
|
||||||
s3.putObject({ Bucket: bucket, Key: mismatchObject, Body: body,
|
s3.putObject({ Bucket: bucket, Key: mismatchObject, Body: body,
|
||||||
Metadata: { 'scal-location-constraint': awsLocationMismatch } },
|
Metadata: { 'scal-location-constraint': awsLocationMismatch } },
|
||||||
err => {
|
err => {
|
||||||
assert.equal(err, null, `Err putting object: ${err}`);
|
expect(err).toEqual(null);
|
||||||
done();
|
done();
|
||||||
});
|
});
|
||||||
});
|
});
|
||||||
|
|
||||||
it('should get an object from AWS', done => {
|
test('should get an object from AWS', done => {
|
||||||
s3.getObject({ Bucket: bucket, Key: mismatchObject },
|
s3.getObject({ Bucket: bucket, Key: mismatchObject },
|
||||||
(err, res) => {
|
(err, res) => {
|
||||||
assert.equal(err, null, `Error getting object: ${err}`);
|
expect(err).toEqual(null);
|
||||||
assert.strictEqual(res.ETag, `"${correctMD5}"`);
|
expect(res.ETag).toBe(`"${correctMD5}"`);
|
||||||
done();
|
done();
|
||||||
});
|
});
|
||||||
});
|
});
|
||||||
|
|
|
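A note on the recurring change above: Mocha exposes the running test through `this` (`this.currentTest` in hooks, `this.test` inside a test), which only works with `function` callbacks; Jest has no equivalent, so this branch threads that state through a shared `testContext` object instead. A minimal, self-contained sketch of the pattern (everything except the Jest globals is illustrative):

    describe('shared context without Mocha this', () => {
        let testContext;

        beforeEach(() => {
            // Reset per test, mirroring Mocha's fresh this.currentTest.
            testContext = { currentTest: {} };
            testContext.currentTest.key = `somekey-${Date.now()}`;
        });

        test('reads the key prepared by the hook', () => {
            expect(testContext.currentTest.key).toMatch(/^somekey-/);
        });
    });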
@@ -25,8 +25,7 @@ function getAndAssertVersions(s3, bucket, key, versionIds, expectedData,
         s3.getObject({ Bucket: bucket, Key: key,
             VersionId: versionId }, next);
     }, (err, results) => {
-        assert.strictEqual(err, null, 'Expected success ' +
-            `getting object, got error ${err}`);
+        expect(err).toBe(null);
         const resultIds = results.map(result => result.VersionId);
         const resultData = results.map(result =>
             result.Body.toString());
@@ -68,35 +67,33 @@ function testSuite() {
         });
     });

-    it('should not return version ids when versioning has not been ' +
+    test('should not return version ids when versioning has not been ' +
     'configured via CloudServer', done => {
         const key = `somekey-${genUniqID()}`;
         s3.putObject({ Bucket: bucket, Key: key, Body: someBody,
             Metadata: { 'scal-location-constraint': awsLocation } },
             (err, data) => {
-                assert.strictEqual(err, null, 'Expected success ' +
-                    `putting object, got error ${err}`);
-                assert.strictEqual(data.VersionId, undefined);
+                expect(err).toBe(null);
+                expect(data.VersionId).toBe(undefined);
                 getAndAssertResult(s3, { bucket, key, body: someBody,
                     expectedVersionId: false }, done);
             });
     });

-    it('should not return version ids when versioning has not been ' +
+    test('should not return version ids when versioning has not been ' +
     'configured via CloudServer, even when version id specified', done => {
         const key = `somekey-${genUniqID()}`;
         s3.putObject({ Bucket: bucket, Key: key, Body: someBody,
             Metadata: { 'scal-location-constraint': awsLocation } },
             (err, data) => {
-                assert.strictEqual(err, null, 'Expected success ' +
-                    `putting object, got error ${err}`);
-                assert.strictEqual(data.VersionId, undefined);
+                expect(err).toBe(null);
+                expect(data.VersionId).toBe(undefined);
                 getAndAssertResult(s3, { bucket, key, body: someBody,
                     versionId: 'null', expectedVersionId: false }, done);
             });
     });

-    it('should return version id for null version when versioning ' +
+    test('should return version id for null version when versioning ' +
     'has been configured via CloudServer', done => {
         const key = `somekey-${genUniqID()}`;
         async.waterfall([
@@ -113,7 +110,7 @@ function testSuite() {
         ], done);
     });

-    it('should overwrite the null version if putting object twice ' +
+    test('should overwrite the null version if putting object twice ' +
     'before versioning is configured', done => {
         const key = `somekey-${genUniqID()}`;
         const data = ['data1', 'data2'];
@@ -128,7 +125,7 @@ function testSuite() {
         ], done);
     });

-    it('should overwrite existing null version if putting object ' +
+    test('should overwrite existing null version if putting object ' +
     'after suspending versioning', done => {
         const key = `somekey-${genUniqID()}`;
         const data = ['data1', 'data2'];
@@ -149,7 +146,7 @@ function testSuite() {
         ], done);
     });

-    it('should overwrite null version if putting object when ' +
+    test('should overwrite null version if putting object when ' +
     'versioning is suspended after versioning enabled', done => {
         const key = `somekey-${genUniqID()}`;
         const data = [...Array(3).keys()].map(i => `data${i}`);
@@ -162,9 +159,8 @@ function testSuite() {
             next => s3.putObject({ Bucket: bucket, Key: key, Body: data[1],
                 Metadata: { 'scal-location-constraint': awsLocation } },
                 (err, result) => {
-                    assert.strictEqual(err, null, 'Expected success ' +
-                        `putting object, got error ${err}`);
-                    assert.notEqual(result.VersionId, 'null');
+                    expect(err).toBe(null);
+                    expect(result.VersionId).not.toEqual('null');
                     firstVersionId = result.VersionId;
                     next();
                 }),
@@ -185,8 +181,7 @@ function testSuite() {
         ], done);
     });

-    it('should get correct data from aws backend using version IDs',
-    done => {
+    test('should get correct data from aws backend using version IDs', done => {
         const key = `somekey-${genUniqID()}`;
         const data = [...Array(5).keys()].map(i => i.toString());
         const versionIds = ['null'];
@@ -204,8 +199,7 @@ function testSuite() {
         ], done);
     });

-    it('should get correct version when getting without version ID',
-    done => {
+    test('should get correct version when getting without version ID', done => {
         const key = `somekey-${genUniqID()}`;
         const data = [...Array(5).keys()].map(i => i.toString());
         const versionIds = ['null'];
@@ -223,10 +217,9 @@ function testSuite() {
         ], done);
     });

-    it('should get correct data from aws backend using version IDs ' +
+    test('should get correct data from aws backend using version IDs ' +
     'after putting null versions, putting versions, putting more null ' +
-    'versions and then putting more versions',
-    done => {
+    'versions and then putting more versions', done => {
         const key = `somekey-${genUniqID()}`;
         const data = [...Array(16).keys()].map(i => i.toString());
         // put three null versions,
@@ -270,9 +263,8 @@ function testSuite() {
         ], done);
     });

-    it('should return the correct data getting versioned object ' +
-    'even if object was deleted from AWS (creating a delete marker)',
-    done => {
+    test('should return the correct data getting versioned object ' +
+    'even if object was deleted from AWS (creating a delete marker)', done => {
         const key = `somekey-${genUniqID()}`;
         async.waterfall([
             next => enableVersioning(s3, bucket, next),
@@ -287,9 +279,8 @@ function testSuite() {
         ], done);
     });

-    it('should return the correct data getting versioned object ' +
-    'even if object is put directly to AWS (creating new version)',
-    done => {
+    test('should return the correct data getting versioned object ' +
+    'even if object is put directly to AWS (creating new version)', done => {
         const key = `somekey-${genUniqID()}`;
         async.waterfall([
             next => enableVersioning(s3, bucket, next),
@@ -304,9 +295,8 @@ function testSuite() {
         ], done);
     });

-    it('should return a ServiceUnavailable if trying to get an object ' +
-    'that was deleted in AWS but exists in s3 metadata',
-    done => {
+    test('should return a ServiceUnavailable if trying to get an object ' +
+    'that was deleted in AWS but exists in s3 metadata', done => {
         const key = `somekey-${genUniqID()}`;
         async.waterfall([
             next => enableVersioning(s3, bucket, next),
@@ -321,16 +311,15 @@ function testSuite() {
                 err => next(err, s3VerId)),
             (s3VerId, next) => s3.getObject({ Bucket: bucket, Key: key },
                 err => {
-                    assert.strictEqual(err.code, 'ServiceUnavailable');
-                    assert.strictEqual(err.statusCode, 503);
+                    expect(err.code).toBe('ServiceUnavailable');
+                    expect(err.statusCode).toBe(503);
                     next();
                 }),
         ], done);
     });

-    it('should return a ServiceUnavailable if trying to get a version ' +
-    'that was deleted in AWS but exists in s3 metadata',
-    done => {
+    test('should return a ServiceUnavailable if trying to get a version ' +
+    'that was deleted in AWS but exists in s3 metadata', done => {
         const key = `somekey-${genUniqID()}`;
         async.waterfall([
             next => enableVersioning(s3, bucket, next),
@@ -345,8 +334,8 @@ function testSuite() {
                 err => next(err, s3VerId)),
             (s3VerId, next) => s3.getObject({ Bucket: bucket, Key: key,
                 VersionId: s3VerId }, err => {
-                    assert.strictEqual(err.code, 'ServiceUnavailable');
-                    assert.strictEqual(err.statusCode, 503);
+                    expect(err.code).toBe('ServiceUnavailable');
+                    expect(err.statusCode).toBe(503);
                     next();
                 }),
         ], done);
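The assertion rewrites above all follow the same mapping from Node's assert module to Jest matchers. A rough equivalence, sketched as runnable code (note that assert's custom-message third argument has no direct counterpart; Jest prints its own diff on failure, which is why the `'Expected success ...'` strings are simply dropped):

    // assert.strictEqual(a, b)      -> expect(a).toBe(b)        (Object.is vs ===)
    // assert.deepStrictEqual(a, b)  -> expect(a).toEqual(b)     (recursive equality)
    // assert.notEqual(a, b)         -> expect(a).not.toEqual(b)
    // assert(x)                     -> expect(x).toBeTruthy()
    test('matcher equivalents', () => {
        expect(1 + 1).toBe(2);
        expect({ a: 1 }).toEqual({ a: 1 });
        expect('x').not.toEqual('y');
        expect([1]).toBeTruthy();
    });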
@@ -28,7 +28,7 @@ function testSuite() {
     let bucketUtil;
     let s3;

-    before(() => {
+    beforeAll(() => {
         process.stdout.write('Creating bucket');
         bucketUtil = new BucketUtility('default', sigCfg);
         s3 = bucketUtil.s3;
@@ -39,7 +39,7 @@ function testSuite() {
         });
     });

-    after(() => {
+    afterAll(() => {
         process.stdout.write('Emptying bucket\n');
         return bucketUtil.empty(azureContainerName)
         .then(() => {
@@ -55,7 +55,7 @@ function testSuite() {
     keys.forEach(key => {
         describe(`${key.describe} size`, () => {
             const testKey = `${key.name}-${Date.now()}`;
-            before(done => {
+            beforeAll(done => {
                 setTimeout(() => {
                     s3.putObject({
                         Bucket: azureContainerName,
@@ -68,13 +68,12 @@ function testSuite() {
                 }, azureTimeout);
             });

-            it(`should get an ${key.describe} object from Azure`, done => {
+            test(`should get an ${key.describe} object from Azure`, done => {
                 s3.getObject({ Bucket: azureContainerName, Key:
                     testKey },
                     (err, res) => {
-                        assert.equal(err, null, 'Expected success ' +
-                            `but got error ${err}`);
-                        assert.strictEqual(res.ETag, `"${key.MD5}"`);
+                        expect(err).toEqual(null);
+                        expect(res.ETag).toBe(`"${key.MD5}"`);
                         done();
                     });
             });
@@ -83,7 +82,7 @@ function testSuite() {

     describe('with range', () => {
         const azureObject = uniqName(keyObject);
-        before(done => {
+        beforeAll(done => {
             s3.putObject({
                 Bucket: azureContainerName,
                 Key: azureObject,
@@ -94,33 +93,29 @@ function testSuite() {
             }, done);
         });

-        it('should get an object with body 012345 with "bytes=0-5"',
-        done => {
+        test('should get an object with body 012345 with "bytes=0-5"', done => {
             s3.getObject({
                 Bucket: azureContainerName,
                 Key: azureObject,
                 Range: 'bytes=0-5',
             }, (err, res) => {
-                assert.equal(err, null, 'Expected success but got ' +
-                    `error ${err}`);
-                assert.equal(res.ContentLength, 6);
-                assert.strictEqual(res.ContentRange, 'bytes 0-5/10');
-                assert.strictEqual(res.Body.toString(), '012345');
+                expect(err).toEqual(null);
+                expect(res.ContentLength).toEqual(6);
+                expect(res.ContentRange).toBe('bytes 0-5/10');
+                expect(res.Body.toString()).toBe('012345');
                 done();
             });
         });
-        it('should get an object with body 456789 with "bytes=4-"',
-        done => {
+        test('should get an object with body 456789 with "bytes=4-"', done => {
             s3.getObject({
                 Bucket: azureContainerName,
                 Key: azureObject,
                 Range: 'bytes=4-',
             }, (err, res) => {
-                assert.equal(err, null, 'Expected success but got ' +
-                    `error ${err}`);
-                assert.equal(res.ContentLength, 6);
-                assert.strictEqual(res.ContentRange, 'bytes 4-9/10');
-                assert.strictEqual(res.Body.toString(), '456789');
+                expect(err).toEqual(null);
+                expect(res.ContentLength).toEqual(6);
+                expect(res.ContentRange).toBe('bytes 4-9/10');
+                expect(res.Body.toString()).toBe('456789');
                 done();
             });
         });
@@ -128,7 +123,7 @@ function testSuite() {

     describe('returning error', () => {
         const azureObject = uniqName(keyObject);
-        before(done => {
+        beforeAll(done => {
             s3.putObject({
                 Bucket: azureContainerName,
                 Key: azureObject,
@@ -137,24 +132,22 @@ function testSuite() {
                     'scal-location-constraint': azureLocation,
                 },
             }, err => {
-                assert.equal(err, null, 'Expected success but got ' +
-                    `error ${err}`);
+                expect(err).toEqual(null);
                 azureClient.deleteBlob(azureContainerName, azureObject,
                     err => {
-                        assert.equal(err, null, 'Expected success but got ' +
-                            `error ${err}`);
+                        expect(err).toEqual(null);
                         done(err);
                     });
             });
         });

-        it('should return an error on get done to object deleted ' +
+        test('should return an error on get done to object deleted ' +
         'from Azure', done => {
             s3.getObject({
                 Bucket: azureContainerName,
                 Key: azureObject,
             }, err => {
-                assert.strictEqual(err.code, 'ServiceUnavailable');
+                expect(err.code).toBe('ServiceUnavailable');
                 done();
             });
         });
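The lifecycle hooks map one-for-one: Mocha's `before`/`after` become Jest's `beforeAll`/`afterAll`, and both runners wait on a promise returned from a hook, so the `return bucketUtil.empty(...)` chains keep working unchanged. A small sketch of that contract:

    describe('suite-level setup and teardown', () => {
        let ready = false;

        // Jest blocks the suite until the returned promise resolves.
        beforeAll(() => Promise.resolve().then(() => { ready = true; }));
        afterAll(() => Promise.resolve()); // e.g. empty and delete a bucket

        test('runs only after beforeAll has settled', () => {
            expect(ready).toBe(true);
        });
    });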
@@ -20,12 +20,12 @@ const correctMD5 = 'be747eb4b75517bf6b3cf7c5fbb62f3a';
 const emptyMD5 = 'd41d8cd98f00b204e9800998ecf8427e';
 const bigMD5 = 'f1c9645dbc14efddc7d8a322685f26eb';

-describe('Multiple backend get object', function testSuite() {
+describe('Multiple backend get object', () => {
     this.timeout(30000);
     withV4(sigCfg => {
         let bucketUtil;
         let s3;
-        before(() => {
+        beforeAll(() => {
             process.stdout.write('Creating bucket');
             bucketUtil = new BucketUtility('default', sigCfg);
             s3 = bucketUtil.s3;
@@ -36,7 +36,7 @@ describe('Multiple backend get object', function testSuite() {
             });
         });

-        after(() => {
+        afterAll(() => {
             process.stdout.write('Emptying bucket\n');
             return bucketUtil.empty(bucket)
             .then(() => {
@@ -51,7 +51,7 @@ describe('Multiple backend get object', function testSuite() {
         });

         describeSkipIfNotMultipleOrCeph('with objects in GCP', () => {
-            before(() => {
+            beforeAll(() => {
                 process.stdout.write('Putting object to GCP\n');
                 return s3.putObjectAsync({ Bucket: bucket, Key: gcpObject,
                     Body: body,
@@ -108,16 +108,15 @@ describe('Multiple backend get object', function testSuite() {
             getTests.forEach(test => {
                 const { Bucket, Key, range, size } = test.input;
                 const { MD5, contentRange } = test.output;
-                it(test.msg, done => {
+                test(test.msg, done => {
                     s3.getObject({ Bucket, Key, Range: range },
                     (err, res) => {
-                        assert.equal(err, null,
-                            `Expected success but got error ${err}`);
+                        expect(err).toEqual(null);
                         if (range) {
-                            assert.strictEqual(res.ContentLength, `${size}`);
-                            assert.strictEqual(res.ContentRange, contentRange);
+                            expect(res.ContentLength).toBe(`${size}`);
+                            expect(res.ContentRange).toBe(contentRange);
                         }
-                        assert.strictEqual(res.ETag, `"${MD5}"`);
+                        expect(res.ETag).toBe(`"${MD5}"`);
                         done();
                     });
                 });
@@ -129,16 +128,16 @@ describe('Multiple backend get object', function testSuite() {
                 s3.putObject({ Bucket: bucket, Key: mismatchObject, Body: body,
                     Metadata: { 'scal-location-constraint': gcpLocationMismatch } },
                     err => {
-                        assert.equal(err, null, `Err putting object: ${err}`);
+                        expect(err).toEqual(null);
                         done();
                     });
             });

-            it('should get an object from GCP', done => {
+            test('should get an object from GCP', done => {
                 s3.getObject({ Bucket: bucket, Key: mismatchObject },
                     (err, res) => {
-                        assert.equal(err, null, `Error getting object: ${err}`);
-                        assert.strictEqual(res.ETag, `"${correctMD5}"`);
+                        expect(err).toEqual(null);
+                        expect(res.ETag).toBe(`"${correctMD5}"`);
                         done();
                     });
             });
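Two leftovers worth flagging in the hunks above, as excerpted: `this.timeout(30000)` survives inside the now-arrow `describe` callback, where `this` is no longer a Mocha context, and `getTests.forEach(test => ...)` shadows Jest's global `test` before calling it. If these lines work in this branch, it is due to context not shown here; the usual Jest spellings would be something like:

    jest.setTimeout(30000); // file-wide replacement for Mocha's this.timeout(30000)

    // Hypothetical stand-in for the data-driven cases in the diff.
    const getTests = [
        { msg: 'illustrative case', input: { range: null }, output: {} },
    ];

    // Renaming the callback parameter keeps Jest's global `test` reachable.
    getTests.forEach(testCase => {
        test(testCase.msg, () => {
            expect(testCase.output).toBeDefined();
        });
    });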
@@ -32,22 +32,29 @@ describeSkipIfNotMultipleOrCeph('Initiate MPU to AZURE', () => {
         });
     });
     describe('Basic test: ', () => {
+        let testContext;
+
+        beforeEach(() => {
+            testContext = {};
+        });
+
         beforeEach(done =>
             s3.createBucket({ Bucket: azureContainerName,
                 CreateBucketConfiguration: {
                     LocationConstraint: azureLocation,
                 },
             }, done));
-        afterEach(function afterEachF(done) {
+        afterEach(done => {
             const params = {
                 Bucket: azureContainerName,
                 Key: keyName,
-                UploadId: this.currentTest.uploadId,
+                UploadId: testContext.currentTest.uploadId,
             };
             s3.abortMultipartUpload(params, done);
         });
-        it('should create MPU and list in-progress multipart uploads',
-        function ifF(done) {
+        test(
+            'should create MPU and list in-progress multipart uploads',
+            done => {
             const params = {
                 Bucket: azureContainerName,
                 Key: keyName,
@@ -55,24 +62,23 @@ describeSkipIfNotMultipleOrCeph('Initiate MPU to AZURE', () => {
             };
             async.waterfall([
                 next => s3.createMultipartUpload(params, (err, res) => {
-                    this.test.uploadId = res.UploadId;
-                    assert(this.test.uploadId);
-                    assert.strictEqual(res.Bucket, azureContainerName);
-                    assert.strictEqual(res.Key, keyName);
+                    testContext.test.uploadId = res.UploadId;
+                    expect(testContext.test.uploadId).toBeTruthy();
+                    expect(res.Bucket).toBe(azureContainerName);
+                    expect(res.Key).toBe(keyName);
                     next(err);
                 }),
                 next => s3.listMultipartUploads(
                     { Bucket: azureContainerName }, (err, res) => {
-                        assert.strictEqual(res.NextKeyMarker, keyName);
-                        assert.strictEqual(res.NextUploadIdMarker,
-                            this.test.uploadId);
-                        assert.strictEqual(res.Uploads[0].Key, keyName);
-                        assert.strictEqual(res.Uploads[0].UploadId,
-                            this.test.uploadId);
+                        expect(res.NextKeyMarker).toBe(keyName);
+                        expect(res.NextUploadIdMarker).toBe(testContext.test.uploadId);
+                        expect(res.Uploads[0].Key).toBe(keyName);
+                        expect(res.Uploads[0].UploadId).toBe(testContext.test.uploadId);
                         next(err);
                     }),
             ], done);
-        });
+        }
+        );
     });
 });
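One subtlety in the suite above: the new hook resets `testContext` to `{}`, the test writes `testContext.test.uploadId`, but the `afterEach` reads `testContext.currentTest.uploadId`. Under Mocha, `this.test` (in the test) and `this.currentTest` (in hooks) referred to the same test object; on a plain object they are two unrelated properties, so as excerpted the abort in `afterEach` would dereference `undefined`. One way to keep both spellings coherent, sketched with a hypothetical id:

    let testContext;

    beforeEach(() => {
        const shared = {}; // one record behind both Mocha-era spellings
        testContext = { test: shared, currentTest: shared };
    });

    test('hook and test observe the same state', () => {
        testContext.test.uploadId = 'upload-123'; // hypothetical id
        expect(testContext.currentTest.uploadId).toBe('upload-123');
    });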
@@ -34,22 +34,29 @@ describeSkipIfNotMultipleOrCeph('Initiate MPU to GCP', () => {
         });
     });
     describe('Basic test: ', () => {
+        let testContext;
+
+        beforeEach(() => {
+            testContext = {};
+        });
+
         beforeEach(done =>
             s3.createBucket({ Bucket: bucket,
                 CreateBucketConfiguration: {
                     LocationConstraint: gcpLocation,
                 },
             }, done));
-        afterEach(function afterEachF(done) {
+        afterEach(done => {
             const params = {
                 Bucket: bucket,
                 Key: keyName,
-                UploadId: this.currentTest.uploadId,
+                UploadId: testContext.currentTest.uploadId,
             };
             s3.abortMultipartUpload(params, done);
         });
-        it('should create MPU and list in-progress multipart uploads',
-        function ifF(done) {
+        test(
+            'should create MPU and list in-progress multipart uploads',
+            done => {
             const params = {
                 Bucket: bucket,
                 Key: keyName,
@@ -57,25 +64,23 @@ describeSkipIfNotMultipleOrCeph('Initiate MPU to GCP', () => {
             };
             async.waterfall([
                 next => s3.createMultipartUpload(params, (err, res) => {
-                    this.test.uploadId = res.UploadId;
-                    assert(this.test.uploadId);
-                    assert.strictEqual(res.Bucket, bucket);
-                    assert.strictEqual(res.Key, keyName);
+                    testContext.test.uploadId = res.UploadId;
+                    expect(testContext.test.uploadId).toBeTruthy();
+                    expect(res.Bucket).toBe(bucket);
+                    expect(res.Key).toBe(keyName);
                     next(err);
                 }),
                 next => s3.listMultipartUploads(
                     { Bucket: bucket }, (err, res) => {
-                        assert.strictEqual(res.NextKeyMarker, keyName);
-                        assert.strictEqual(res.NextUploadIdMarker,
-                            this.test.uploadId);
-                        assert.strictEqual(res.Uploads[0].Key, keyName);
-                        assert.strictEqual(res.Uploads[0].UploadId,
-                            this.test.uploadId);
+                        expect(res.NextKeyMarker).toBe(keyName);
+                        expect(res.NextUploadIdMarker).toBe(testContext.test.uploadId);
+                        expect(res.Uploads[0].Key).toBe(keyName);
+                        expect(res.Uploads[0].UploadId).toBe(testContext.test.uploadId);
                         next(err);
                     }),
                 next => {
                     const mpuKey =
-                        createMpuKey(keyName, this.test.uploadId, 'init');
+                        createMpuKey(keyName, testContext.test.uploadId, 'init');
                     const params = {
                         Bucket: gcpBucketMPU,
                         Key: mpuKey,
@@ -87,7 +92,8 @@ describeSkipIfNotMultipleOrCeph('Initiate MPU to GCP', () => {
                     });
                 },
             ], done);
-        });
+        }
+        );
     });
 });
@@ -17,7 +17,7 @@ let s3;
 describeSkipIfNotMultipleOrCeph('List parts of MPU on Azure data backend',
 () => {
     withV4(sigCfg => {
-        beforeEach(function beforeEachFn() {
+        beforeEach(() => {
            this.currentTest.key = `somekey-${genUniqID()}`;
            bucketUtil = new BucketUtility('default', sigCfg);
            s3 = bucketUtil.s3;
@@ -44,7 +44,7 @@ describeSkipIfNotMultipleOrCeph('List parts of MPU on Azure data backend',
            });
        });

-        afterEach(function afterEachFn() {
+        afterEach(() => {
            process.stdout.write('Emptying bucket');
            return s3.abortMultipartUploadAsync({
                Bucket: azureContainerName, Key: this.currentTest.key,
@@ -61,35 +61,35 @@ describeSkipIfNotMultipleOrCeph('List parts of MPU on Azure data backend',
            });
        });

-        it('should list both parts', function itFn(done) {
+        test('should list both parts', done => {
            s3.listParts({
                Bucket: azureContainerName,
                Key: this.test.key,
                UploadId: this.test.uploadId },
            (err, data) => {
-                assert.equal(err, null, `Err listing parts: ${err}`);
-                assert.strictEqual(data.Parts.length, 2);
-                assert.strictEqual(data.Parts[0].PartNumber, 1);
-                assert.strictEqual(data.Parts[0].Size, firstPartSize);
-                assert.strictEqual(data.Parts[0].ETag, this.test.firstEtag);
-                assert.strictEqual(data.Parts[1].PartNumber, 2);
-                assert.strictEqual(data.Parts[1].Size, secondPartSize);
-                assert.strictEqual(data.Parts[1].ETag, this.test.secondEtag);
+                expect(err).toEqual(null);
+                expect(data.Parts.length).toBe(2);
+                expect(data.Parts[0].PartNumber).toBe(1);
+                expect(data.Parts[0].Size).toBe(firstPartSize);
+                expect(data.Parts[0].ETag).toBe(this.test.firstEtag);
+                expect(data.Parts[1].PartNumber).toBe(2);
+                expect(data.Parts[1].Size).toBe(secondPartSize);
+                expect(data.Parts[1].ETag).toBe(this.test.secondEtag);
                done();
            });
        });

-        it('should only list the second part', function itFn(done) {
+        test('should only list the second part', done => {
            s3.listParts({
                Bucket: azureContainerName,
                Key: this.test.key,
                PartNumberMarker: 1,
                UploadId: this.test.uploadId },
            (err, data) => {
-                assert.equal(err, null, `Err listing parts: ${err}`);
-                assert.strictEqual(data.Parts[0].PartNumber, 2);
-                assert.strictEqual(data.Parts[0].Size, secondPartSize);
-                assert.strictEqual(data.Parts[0].ETag, this.test.secondEtag);
+                expect(err).toEqual(null);
+                expect(data.Parts[0].PartNumber).toBe(2);
+                expect(data.Parts[0].Size).toBe(secondPartSize);
+                expect(data.Parts[0].ETag).toBe(this.test.secondEtag);
                done();
            });
        });
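In the two "List parts" files the hook and test bodies were converted to arrows while still assigning to `this.currentTest.key` and reading `this.test.key`; with an arrow, `this` is lexically bound (to the module, not a test context), so as excerpted those lines would throw at runtime. Plain closure variables are the straightforward fix, sketched with hypothetical names:

    let currentKey;
    let uploadId;

    beforeEach(() => {
        currentKey = `somekey-${Date.now()}`;
        uploadId = 'upload-abc'; // normally returned by createMultipartUpload
    });

    test('consumes the state captured by the closure', () => {
        expect(currentKey).toMatch(/^somekey-/);
        expect(uploadId).toBeTruthy();
    });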
@@ -16,7 +16,7 @@ let s3;

 describeSkipIfNotMultipleOrCeph('List parts of MPU on GCP data backend', () => {
     withV4(sigCfg => {
-        beforeEach(function beforeEachFn() {
+        beforeEach(() => {
            this.currentTest.key = `somekey-${genUniqID()}`;
            bucketUtil = new BucketUtility('default', sigCfg);
            s3 = bucketUtil.s3;
@@ -43,7 +43,7 @@ describeSkipIfNotMultipleOrCeph('List parts of MPU on GCP data backend', () => {
            });
        });

-        afterEach(function afterEachFn() {
+        afterEach(() => {
            process.stdout.write('Emptying bucket');
            return s3.abortMultipartUploadAsync({
                Bucket: bucket, Key: this.currentTest.key,
@@ -60,35 +60,35 @@ describeSkipIfNotMultipleOrCeph('List parts of MPU on GCP data backend', () => {
            });
        });

-        it('should list both parts', function itFn(done) {
+        test('should list both parts', done => {
            s3.listParts({
                Bucket: bucket,
                Key: this.test.key,
                UploadId: this.test.uploadId },
            (err, data) => {
-                assert.equal(err, null, `Err listing parts: ${err}`);
-                assert.strictEqual(data.Parts.length, 2);
-                assert.strictEqual(data.Parts[0].PartNumber, 1);
-                assert.strictEqual(data.Parts[0].Size, firstPartSize);
-                assert.strictEqual(data.Parts[0].ETag, this.test.firstEtag);
-                assert.strictEqual(data.Parts[1].PartNumber, 2);
-                assert.strictEqual(data.Parts[1].Size, secondPartSize);
-                assert.strictEqual(data.Parts[1].ETag, this.test.secondEtag);
+                expect(err).toEqual(null);
+                expect(data.Parts.length).toBe(2);
+                expect(data.Parts[0].PartNumber).toBe(1);
+                expect(data.Parts[0].Size).toBe(firstPartSize);
+                expect(data.Parts[0].ETag).toBe(this.test.firstEtag);
+                expect(data.Parts[1].PartNumber).toBe(2);
+                expect(data.Parts[1].Size).toBe(secondPartSize);
+                expect(data.Parts[1].ETag).toBe(this.test.secondEtag);
                done();
            });
        });

-        it('should only list the second part', function itFn(done) {
+        test('should only list the second part', done => {
            s3.listParts({
                Bucket: bucket,
                Key: this.test.key,
                PartNumberMarker: 1,
                UploadId: this.test.uploadId },
            (err, data) => {
-                assert.equal(err, null, `Err listing parts: ${err}`);
-                assert.strictEqual(data.Parts[0].PartNumber, 2);
-                assert.strictEqual(data.Parts[0].Size, secondPartSize);
-                assert.strictEqual(data.Parts[0].ETag, this.test.secondEtag);
+                expect(err).toEqual(null);
+                expect(data.Parts[0].PartNumber).toBe(2);
+                expect(data.Parts[0].Size).toBe(secondPartSize);
+                expect(data.Parts[0].ETag).toBe(this.test.secondEtag);
                done();
            });
        });
@@ -34,26 +34,32 @@ describeSkipIfNotMultipleOrCeph('Abort MPU on GCP data backend', function
 descrbeFn() {
     this.timeout(180000);
     withV4(sigCfg => {
-        beforeEach(function beforeFn() {
+        beforeEach(() => {
            this.currentTest.key = uniqName(keyObject);
            bucketUtil = new BucketUtility('default', sigCfg);
            s3 = bucketUtil.s3;
        });

        describe('with bucket location header', () => {
-            beforeEach(function beforeEachFn(done) {
+            let testContext;
+
+            beforeEach(() => {
+                testContext = {};
+            });
+
+            beforeEach(done => {
                async.waterfall([
                    next => s3.createBucket({ Bucket: bucket },
                        err => next(err)),
                    next => s3.createMultipartUpload({
                        Bucket: bucket,
-                        Key: this.currentTest.key,
+                        Key: testContext.currentTest.key,
                        Metadata: { 'scal-location-constraint': gcpLocation },
                    }, (err, res) => {
                        if (err) {
                            return next(err);
                        }
-                        this.currentTest.uploadId = res.UploadId;
+                        testContext.currentTest.uploadId = res.UploadId;
                        return next();
                    }),
                ], done);
@@ -62,62 +68,67 @@ descrbeFn() {
            afterEach(done => s3.deleteBucket({ Bucket: bucket },
                done));

-            it('should abort a MPU with 0 parts', function itFn(done) {
+            test('should abort a MPU with 0 parts', done => {
                const params = {
                    Bucket: bucket,
-                    Key: this.test.key,
-                    UploadId: this.test.uploadId,
+                    Key: testContext.test.key,
+                    UploadId: testContext.test.uploadId,
                };
                async.waterfall([
                    next => s3.abortMultipartUpload(params, () => next()),
                    next => setTimeout(() => checkMPUList(
-                        gcpBucketMPU, this.test.key, this.test.uploadId, next),
+                        gcpBucketMPU, testContext.test.key, testContext.test.uploadId, next),
                        gcpTimeout),
                ], done);
            });

-            it('should abort a MPU with uploaded parts', function itFn(done) {
+            test('should abort a MPU with uploaded parts', done => {
                const params = {
                    Bucket: bucket,
-                    Key: this.test.key,
-                    UploadId: this.test.uploadId,
+                    Key: testContext.test.key,
+                    UploadId: testContext.test.uploadId,
                };
                async.waterfall([
                    next => {
                        async.times(2, (n, cb) => {
                            const params = {
                                Bucket: bucket,
-                                Key: this.test.key,
-                                UploadId: this.test.uploadId,
+                                Key: testContext.test.key,
+                                UploadId: testContext.test.uploadId,
                                Body: body,
                                PartNumber: n + 1,
                            };
                            s3.uploadPart(params, (err, res) => {
                                assert.ifError(err,
                                    `Expected success, but got err ${err}`);
-                                assert.strictEqual(
-                                    res.ETag, `"${correctMD5}"`);
+                                expect(res.ETag).toBe(`"${correctMD5}"`);
                                cb();
                            });
                        }, () => next());
                    },
                    next => s3.abortMultipartUpload(params, () => next()),
                    next => setTimeout(() => checkMPUList(
-                        gcpBucketMPU, this.test.key, this.test.uploadId, next),
+                        gcpBucketMPU, testContext.test.key, testContext.test.uploadId, next),
                        gcpTimeout),
                ], done);
            });
        });

        describe('with previously existing object with same key', () => {
-            beforeEach(function beforeEachFn(done) {
+            let testContext;
+
+            beforeEach(() => {
+                testContext = {};
+            });
+
+            beforeEach(done => {
                async.waterfall([
                    next => s3.createBucket({ Bucket: bucket },
                        err => next(err)),
                    next => {
                        s3.putObject({
                            Bucket: bucket,
-                            Key: this.currentTest.key,
+                            Key: testContext.currentTest.key,
                            Metadata: {
                                'scal-location-constraint': gcpLocation },
                            Body: body,
@@ -129,13 +140,13 @@ descrbeFn() {
                    },
                    next => s3.createMultipartUpload({
                        Bucket: bucket,
-                        Key: this.currentTest.key,
+                        Key: testContext.currentTest.key,
                        Metadata: { 'scal-location-constraint': gcpLocation },
                    }, (err, res) => {
                        if (err) {
                            return next(err);
                        }
-                        this.currentTest.uploadId = res.UploadId;
+                        testContext.currentTest.uploadId = res.UploadId;
                        return next();
                    }),
                ], done);
@@ -155,12 +166,11 @@ descrbeFn() {
            });
        });

-            it('should abort MPU without deleting existing object',
-            function itFn(done) {
+            test('should abort MPU without deleting existing object', done => {
                const params = {
                    Bucket: bucket,
-                    Key: this.test.key,
-                    UploadId: this.test.uploadId,
+                    Key: testContext.test.key,
+                    UploadId: testContext.test.uploadId,
                };
                async.waterfall([
                    next => {
@@ -177,12 +187,12 @@ descrbeFn() {
                    next => setTimeout(() => {
                        const params = {
                            Bucket: gcpBucket,
-                            Key: this.test.key,
+                            Key: testContext.test.key,
                        };
                        gcpClient.getObject(params, (err, res) => {
                            assert.ifError(err,
                                `Expected success, got error: ${err}`);
-                            assert.strictEqual(res.ETag, `"${correctMD5}"`);
+                            expect(res.ETag).toBe(`"${correctMD5}"`);
                            next();
                        });
                    }, gcpTimeout),
@ -20,11 +20,11 @@ let s3;
|
||||||
function azureCheck(container, key, expected, cb) {
|
function azureCheck(container, key, expected, cb) {
|
||||||
azureClient.getBlobProperties(container, key, (err, res) => {
|
azureClient.getBlobProperties(container, key, (err, res) => {
|
||||||
if (expected.error) {
|
if (expected.error) {
|
||||||
assert.strictEqual(err.statusCode, 404);
|
expect(err.statusCode).toBe(404);
|
||||||
assert.strictEqual(err.code, 'NotFound');
|
expect(err.code).toBe('NotFound');
|
||||||
} else {
|
} else {
|
||||||
const convertedMD5 = convertMD5(res.contentSettings.contentMD5);
|
const convertedMD5 = convertMD5(res.contentSettings.contentMD5);
|
||||||
assert.strictEqual(convertedMD5, expectedMD5);
|
expect(convertedMD5).toBe(expectedMD5);
|
||||||
}
|
}
|
||||||
return cb();
|
return cb();
|
||||||
});
|
});
|
||||||
|
@ -34,25 +34,31 @@ describeSkipIfNotMultipleOrCeph('Abort MPU on Azure data backend', function
|
||||||
describeF() {
|
describeF() {
|
||||||
this.timeout(50000);
|
this.timeout(50000);
|
||||||
withV4(sigCfg => {
|
withV4(sigCfg => {
|
||||||
beforeEach(function beforeFn() {
|
beforeEach(() => {
|
||||||
this.currentTest.key = uniqName(keyObject);
|
this.currentTest.key = uniqName(keyObject);
|
||||||
bucketUtil = new BucketUtility('default', sigCfg);
|
bucketUtil = new BucketUtility('default', sigCfg);
|
||||||
s3 = bucketUtil.s3;
|
s3 = bucketUtil.s3;
|
||||||
});
|
});
|
||||||
describe('with bucket location header', () => {
|
describe('with bucket location header', () => {
|
||||||
beforeEach(function beforeEachFn(done) {
|
let testContext;
|
||||||
|
|
||||||
|
beforeEach(() => {
|
||||||
|
testContext = {};
|
||||||
|
});
|
||||||
|
|
||||||
|
beforeEach(done => {
|
||||||
async.waterfall([
|
async.waterfall([
|
||||||
next => s3.createBucket({ Bucket: azureContainerName },
|
next => s3.createBucket({ Bucket: azureContainerName },
|
||||||
err => next(err)),
|
err => next(err)),
|
||||||
next => s3.createMultipartUpload({
|
next => s3.createMultipartUpload({
|
||||||
Bucket: azureContainerName,
|
Bucket: azureContainerName,
|
||||||
Key: this.currentTest.key,
|
Key: testContext.currentTest.key,
|
||||||
Metadata: { 'scal-location-constraint': azureLocation },
|
Metadata: { 'scal-location-constraint': azureLocation },
|
||||||
}, (err, res) => {
|
}, (err, res) => {
|
||||||
if (err) {
|
if (err) {
|
||||||
return next(err);
|
return next(err);
|
||||||
}
|
}
|
||||||
this.currentTest.uploadId = res.UploadId;
|
testContext.currentTest.uploadId = res.UploadId;
|
||||||
return next();
|
return next();
|
||||||
}),
|
}),
|
||||||
], done);
|
], done);
|
||||||
|
@ -61,36 +67,36 @@ describeF() {
|
         afterEach(done => s3.deleteBucket({ Bucket: azureContainerName },
             done));

-        it('should abort an MPU with one empty part ', function itFn(done) {
+        test('should abort an MPU with one empty part ', done => {
             const expected = { error: true };
             const params = {
                 Bucket: azureContainerName,
-                Key: this.test.key,
-                UploadId: this.test.uploadId,
+                Key: testContext.test.key,
+                UploadId: testContext.test.uploadId,
             };
             async.waterfall([
                 next => {
                     const partParams = Object.assign({ PartNumber: 1 },
                         params);
                     s3.uploadPart(partParams, err => {
-                        assert.strictEqual(err, null, 'Expected success, ' +
-                            `got error: ${err}`);
+                        expect(err).toBe(null);
                         return next();
                     });
                 },
                 next => s3.abortMultipartUpload(params, err => next(err)),
-                next => azureCheck(azureContainerName, this.test.key,
+                next => azureCheck(azureContainerName, testContext.test.key,
                     expected, next),
             ], done);
         });

-        it('should abort MPU with one part bigger than max subpart',
-        function itFn(done) {
+        test(
+            'should abort MPU with one part bigger than max subpart',
+            done => {
             const expected = { error: true };
             const params = {
                 Bucket: azureContainerName,
-                Key: this.test.key,
-                UploadId: this.test.uploadId,
+                Key: testContext.test.key,
+                UploadId: testContext.test.uploadId,
             };
             async.waterfall([
                 next => {
@@ -98,20 +104,26 @@ describeF() {
                     const partParams = Object.assign(
                         { PartNumber: 1, Body: body }, params);
                     s3.uploadPart(partParams, err => {
-                        assert.strictEqual(err, null, 'Expected ' +
-                            `success, got error: ${err}`);
+                        expect(err).toBe(null);
                         return next();
                     });
                 },
                 next => s3.abortMultipartUpload(params, err => next(err)),
-                next => azureCheck(azureContainerName, this.test.key,
+                next => azureCheck(azureContainerName, testContext.test.key,
                     expected, next),
             ], done);
-        });
+        }
+        );
        });

        describe('with previously existing object with same key', () => {
-            beforeEach(function beforeEachFn(done) {
+            let testContext;
+
+            beforeEach(() => {
+                testContext = {};
+            });
+
+            beforeEach(done => {
                async.waterfall([
                    next => s3.createBucket({ Bucket: azureContainerName },
                        err => next(err)),
@@ -119,25 +131,24 @@ describeF() {
                        const body = Buffer.alloc(10);
                        s3.putObject({
                            Bucket: azureContainerName,
-                            Key: this.currentTest.key,
+                            Key: testContext.currentTest.key,
                            Metadata: { 'scal-location-constraint':
                                azureLocation },
                            Body: body,
                        }, err => {
-                            assert.equal(err, null, 'Err putting object to ' +
-                                `azure: ${err}`);
+                            expect(err).toEqual(null);
                            return next();
                        });
                    },
                    next => s3.createMultipartUpload({
                        Bucket: azureContainerName,
-                        Key: this.currentTest.key,
+                        Key: testContext.currentTest.key,
                        Metadata: { 'scal-location-constraint': azureLocation },
                    }, (err, res) => {
                        if (err) {
                            return next(err);
                        }
-                        this.currentTest.uploadId = res.UploadId;
+                        testContext.currentTest.uploadId = res.UploadId;
                        return next();
                    }),
                ], done);
@@ -157,13 +168,12 @@ describeF() {
                });
            });

-            it('should abort MPU without deleting existing object',
-            function itFn(done) {
+            test('should abort MPU without deleting existing object', done => {
                const expected = { error: false };
                const params = {
                    Bucket: azureContainerName,
-                    Key: this.test.key,
-                    UploadId: this.test.uploadId,
+                    Key: testContext.test.key,
+                    UploadId: testContext.test.uploadId,
                };
                async.waterfall([
                    next => {
@@ -171,13 +181,12 @@ describeF() {
                        const partParams = Object.assign(
                            { PartNumber: 1, Body: body }, params);
                        s3.uploadPart(partParams, err => {
-                            assert.strictEqual(err, null, 'Expected ' +
-                                `success, got error: ${err}`);
+                            expect(err).toBe(null);
                            return next();
                        });
                    },
                    next => s3.abortMultipartUpload(params, err => next(err)),
-                    next => azureCheck(azureContainerName, this.test.key,
+                    next => azureCheck(azureContainerName, testContext.test.key,
                        expected, next),
                ], done);
            });
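The recurring change in the hunks above swaps Mocha's `this.test`/`this.currentTest` context for a `testContext` object held in closure scope, since Jest invokes arrow callbacks without a Mocha-style `this`. A minimal self-contained sketch of the pattern, assuming a Jest runtime (the `testContext` name mirrors the diff; the values are illustrative):

let testContext;

beforeEach(() => {
    // Reset before every test so state never leaks between cases; seeding
    // the nested objects keeps later hooks from dereferencing undefined.
    testContext = { currentTest: {}, test: {} };
});

beforeEach(done => {
    // Async setup hooks can still write into the shared context.
    testContext.currentTest.uploadId = 'example-upload-id'; // illustrative value
    done();
});

test('reads the shared context instead of `this`', () => {
    expect(testContext.currentTest.uploadId).toBe('example-upload-id');
});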
@@ -36,16 +36,16 @@ function getCheck(key, bucketMatch, cb) {
    let azureKey = key;
    s3.getObject({ Bucket: azureContainerName, Key: azureKey },
    (err, s3Res) => {
-        assert.equal(err, null, `Err getting object from S3: ${err}`);
-        assert.strictEqual(s3Res.ETag, `"${s3MD5}"`);
+        expect(err).toEqual(null);
+        expect(s3Res.ETag).toBe(`"${s3MD5}"`);

        if (!bucketMatch) {
            azureKey = `${azureContainerName}/${key}`;
        }
        azureClient.getBlobProperties(azureContainerName, azureKey,
        (err, azureRes) => {
-            assert.equal(err, null, `Err getting object from Azure: ${err}`);
-            assert.strictEqual(expectedContentLength, azureRes.contentLength);
+            expect(err).toEqual(null);
+            expect(expectedContentLength).toBe(azureRes.contentLength);
            cb();
        });
    });
@@ -62,9 +62,9 @@ function mpuSetup(key, location, cb) {
            };
            s3.createMultipartUpload(params, (err, res) => {
                const uploadId = res.UploadId;
-                assert(uploadId);
-                assert.strictEqual(res.Bucket, azureContainerName);
-                assert.strictEqual(res.Key, key);
+                expect(uploadId).toBeTruthy();
+                expect(res.Bucket).toBe(azureContainerName);
+                expect(res.Key).toBe(key);
                next(err, uploadId);
            });
        },
@@ -96,7 +96,7 @@ function mpuSetup(key, location, cb) {
        },
    ], (err, uploadId) => {
        process.stdout.write('Created MPU and put two parts\n');
-        assert.equal(err, null, `Err setting up MPU: ${err}`);
+        expect(err).toEqual(null);
        cb(uploadId, partArray);
    });
}
@@ -105,7 +105,7 @@ describeSkipIfNotMultipleOrCeph('Complete MPU API for Azure data backend',
function testSuite() {
    this.timeout(150000);
    withV4(sigCfg => {
-        beforeEach(function beFn() {
+        beforeEach(() => {
            this.currentTest.key = `somekey-${genUniqID()}`;
            bucketUtil = new BucketUtility('default', sigCfg);
            s3 = bucketUtil.s3;
@@ -130,7 +130,7 @@ function testSuite() {
            });
        });

-        it('should complete an MPU on Azure', function itFn(done) {
+        test('should complete an MPU on Azure', done => {
            mpuSetup(this.test.key, azureLocation, (uploadId, partArray) => {
                const params = {
                    Bucket: azureContainerName,
@@ -139,15 +139,16 @@ function testSuite() {
                    MultipartUpload: { Parts: partArray },
                };
                s3.completeMultipartUpload(params, err => {
-                    assert.equal(err, null, `Err completing MPU: ${err}`);
+                    expect(err).toEqual(null);
                    setTimeout(() => getCheck(this.test.key, true, done),
                        azureTimeout);
                });
            });
        });

-        it('should complete an MPU on Azure with bucketMatch=false',
-        function itFn(done) {
+        test(
+            'should complete an MPU on Azure with bucketMatch=false',
+            done => {
            mpuSetup(this.test.key, azureLocationMismatch,
            (uploadId, partArray) => {
                const params = {
@@ -157,15 +158,16 @@ function testSuite() {
                    MultipartUpload: { Parts: partArray },
                };
                s3.completeMultipartUpload(params, err => {
-                    assert.equal(err, null, `Err completing MPU: ${err}`);
+                    expect(err).toEqual(null);
                    setTimeout(() => getCheck(this.test.key, false, done),
                        azureTimeout);
                });
            });
-        });
+        }
+        );

-        it('should complete an MPU on Azure with same key as object put ' +
-        'to file', function itFn(done) {
+        test('should complete an MPU on Azure with same key as object put ' +
+        'to file', done => {
            const body = Buffer.from('I am a body', 'utf8');
            s3.putObject({
                Bucket: azureContainerName,
@@ -173,7 +175,7 @@ function testSuite() {
                Body: body,
                Metadata: { 'scal-location-constraint': fileLocation } },
            err => {
-                assert.equal(err, null, `Err putting object to file: ${err}`);
+                expect(err).toEqual(null);
                mpuSetup(this.test.key, azureLocation,
                (uploadId, partArray) => {
                    const params = {
@@ -183,7 +185,7 @@ function testSuite() {
                        MultipartUpload: { Parts: partArray },
                    };
                    s3.completeMultipartUpload(params, err => {
-                        assert.equal(err, null, `Err completing MPU: ${err}`);
+                        expect(err).toEqual(null);
                        setTimeout(() => getCheck(this.test.key, true, done),
                            azureTimeout);
                    });
@@ -191,8 +193,8 @@ function testSuite() {
            });
        });

-        it('should complete an MPU on Azure with same key as object put ' +
-        'to Azure', function itFn(done) {
+        test('should complete an MPU on Azure with same key as object put ' +
+        'to Azure', done => {
            const body = Buffer.from('I am a body', 'utf8');
            s3.putObject({
                Bucket: azureContainerName,
@@ -200,7 +202,7 @@ function testSuite() {
                Body: body,
                Metadata: { 'scal-location-constraint': azureLocation } },
            err => {
-                assert.equal(err, null, `Err putting object to Azure: ${err}`);
+                expect(err).toEqual(null);
                mpuSetup(this.test.key, azureLocation,
                (uploadId, partArray) => {
                    const params = {
@@ -210,7 +212,7 @@ function testSuite() {
                        MultipartUpload: { Parts: partArray },
                    };
                    s3.completeMultipartUpload(params, err => {
-                        assert.equal(err, null, `Err completing MPU: ${err}`);
+                        expect(err).toEqual(null);
                        setTimeout(() => getCheck(this.test.key, true, done),
                            azureTimeout);
                    });
@@ -218,8 +220,8 @@ function testSuite() {
            });
        });

-        it('should complete an MPU on Azure with same key as object put ' +
-        'to AWS', function itFn(done) {
+        test('should complete an MPU on Azure with same key as object put ' +
+        'to AWS', done => {
            const body = Buffer.from('I am a body', 'utf8');
            s3.putObject({
                Bucket: azureContainerName,
@@ -227,7 +229,7 @@ function testSuite() {
                Body: body,
                Metadata: { 'scal-location-constraint': awsLocation } },
            err => {
-                assert.equal(err, null, `Err putting object to AWS: ${err}`);
+                expect(err).toEqual(null);
                mpuSetup(this.test.key, azureLocation,
                (uploadId, partArray) => {
                    const params = {
@@ -237,12 +239,12 @@ function testSuite() {
                        MultipartUpload: { Parts: partArray },
                    };
                    s3.completeMultipartUpload(params, err => {
-                        assert.equal(err, null, `Err completing MPU: ${err}`);
+                        expect(err).toEqual(null);
                        // make sure object is gone from AWS
                        setTimeout(() => {
                            this.test.awsClient.getObject({ Bucket: awsBucket,
                            Key: this.test.key }, err => {
-                                assert.strictEqual(err.code, 'NoSuchKey');
+                                expect(err.code).toBe('NoSuchKey');
                                getCheck(this.test.key, true, done);
                            });
                        }, azureTimeout);
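These hunks are a mechanical assertion translation; the custom failure messages on `assert.equal`/`assert.strictEqual` are dropped in the Jest form. A runnable sketch of the correspondence used throughout, assuming a Jest runtime (the values are illustrative):

const assert = require('assert');

test('assert and expect forms of the same checks', () => {
    const res = { ETag: '"abc123"', Bucket: 'somebucket' };
    // Mocha-era style: a message argument explains the failure.
    assert.strictEqual(res.Bucket, 'somebucket', `bad bucket: ${res.Bucket}`);
    // Jest style used in this diff: strict comparison, no message argument.
    expect(res.Bucket).toBe('somebucket');
    expect(res.ETag).toBe('"abc123"');
    expect(res.ETag).toBeTruthy(); // replaces a bare assert(res.ETag)
});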
@@ -21,16 +21,16 @@ function getCheck(key, bucketMatch, cb) {
    let gcpKey = key;
    s3.getObject({ Bucket: bucket, Key: gcpKey },
    (err, s3Res) => {
-        assert.equal(err, null, `Err getting object from S3: ${err}`);
-        assert.strictEqual(s3Res.ETag, `"${s3MD5}"`);
+        expect(err).toEqual(null);
+        expect(s3Res.ETag).toBe(`"${s3MD5}"`);

        if (!bucketMatch) {
            gcpKey = `${bucket}/${gcpKey}`;
        }
        const params = { Bucket: gcpBucket, Key: gcpKey };
        gcpClient.getObject(params, (err, gcpRes) => {
-            assert.equal(err, null, `Err getting object from GCP: ${err}`);
-            assert.strictEqual(expectedContentLength, gcpRes.ContentLength);
+            expect(err).toEqual(null);
+            expect(expectedContentLength).toBe(gcpRes.ContentLength);
            cb();
        });
    });
@@ -47,9 +47,9 @@ function mpuSetup(key, location, cb) {
            };
            s3.createMultipartUpload(params, (err, res) => {
                const uploadId = res.UploadId;
-                assert(uploadId);
-                assert.strictEqual(res.Bucket, bucket);
-                assert.strictEqual(res.Key, key);
+                expect(uploadId).toBeTruthy();
+                expect(res.Bucket).toBe(bucket);
+                expect(res.Key).toBe(key);
                next(err, uploadId);
            });
        },
@@ -81,7 +81,7 @@ function mpuSetup(key, location, cb) {
        },
    ], (err, uploadId) => {
        process.stdout.write('Created MPU and put two parts\n');
-        assert.equal(err, null, `Err setting up MPU: ${err}`);
+        expect(err).toEqual(null);
        cb(uploadId, partArray);
    });
}
@@ -90,7 +90,7 @@ describeSkipIfNotMultipleOrCeph('Complete MPU API for GCP data backend',
function testSuite() {
    this.timeout(150000);
    withV4(sigCfg => {
-        beforeEach(function beFn() {
+        beforeEach(() => {
            this.currentTest.key = `somekey-${genUniqID()}`;
            bucketUtil = new BucketUtility('default', sigCfg);
            s3 = bucketUtil.s3;
@@ -115,7 +115,7 @@ function testSuite() {
            });
        });

-        it('should complete an MPU on GCP', function itFn(done) {
+        test('should complete an MPU on GCP', done => {
            mpuSetup(this.test.key, gcpLocation, (uploadId, partArray) => {
                const params = {
                    Bucket: bucket,
@@ -125,16 +125,16 @@ function testSuite() {
                };
                setTimeout(() => {
                    s3.completeMultipartUpload(params, err => {
-                        assert.equal(err, null,
-                            `Err completing MPU: ${err}`);
+                        expect(err).toEqual(null);
                        getCheck(this.test.key, true, done);
                    });
                }, gcpTimeout);
            });
        });

-        it('should complete an MPU on GCP with bucketMatch=false',
-        function itFn(done) {
+        test(
+            'should complete an MPU on GCP with bucketMatch=false',
+            done => {
            mpuSetup(this.test.key, gcpLocationMismatch,
            (uploadId, partArray) => {
                const params = {
@@ -145,16 +145,16 @@ function testSuite() {
                };
                setTimeout(() => {
                    s3.completeMultipartUpload(params, err => {
-                        assert.equal(err, null,
-                            `Err completing MPU: ${err}`);
+                        expect(err).toEqual(null);
                        getCheck(this.test.key, false, done);
                    });
                }, gcpTimeout);
            });
-        });
+        }
+        );

-        it('should complete an MPU on GCP with same key as object put ' +
-        'to file', function itFn(done) {
+        test('should complete an MPU on GCP with same key as object put ' +
+        'to file', done => {
            const body = Buffer.from('I am a body', 'utf8');
            s3.putObject({
                Bucket: bucket,
@@ -162,7 +162,7 @@ function testSuite() {
                Body: body,
                Metadata: { 'scal-location-constraint': fileLocation } },
            err => {
-                assert.equal(err, null, `Err putting object to file: ${err}`);
+                expect(err).toEqual(null);
                mpuSetup(this.test.key, gcpLocation,
                (uploadId, partArray) => {
                    const params = {
@@ -173,8 +173,7 @@ function testSuite() {
                    };
                    setTimeout(() => {
                        s3.completeMultipartUpload(params, err => {
-                            assert.equal(err, null,
-                                `Err completing MPU: ${err}`);
+                            expect(err).toEqual(null);
                            getCheck(this.test.key, true, done);
                        });
                    }, gcpTimeout);
@@ -182,8 +181,8 @@ function testSuite() {
            });
        });

-        it('should complete an MPU on GCP with same key as object put ' +
-        'to GCP', function itFn(done) {
+        test('should complete an MPU on GCP with same key as object put ' +
+        'to GCP', done => {
            const body = Buffer.from('I am a body', 'utf8');
            s3.putObject({
                Bucket: bucket,
@@ -191,7 +190,7 @@ function testSuite() {
                Body: body,
                Metadata: { 'scal-location-constraint': gcpLocation } },
            err => {
-                assert.equal(err, null, `Err putting object to GCP: ${err}`);
+                expect(err).toEqual(null);
                mpuSetup(this.test.key, gcpLocation,
                (uploadId, partArray) => {
                    const params = {
@@ -202,8 +201,7 @@ function testSuite() {
                    };
                    setTimeout(() => {
                        s3.completeMultipartUpload(params, err => {
-                            assert.equal(err, null,
-                                `Err completing MPU: ${err}`);
+                            expect(err).toEqual(null);
                            getCheck(this.test.key, true, done);
                        });
                    }, gcpTimeout);
@@ -211,8 +209,8 @@ function testSuite() {
            });
        });

-        it('should complete an MPU on GCP with same key as object put ' +
-        'to AWS', function itFn(done) {
+        test('should complete an MPU on GCP with same key as object put ' +
+        'to AWS', done => {
            const body = Buffer.from('I am a body', 'utf8');
            s3.putObject({
                Bucket: bucket,
@@ -220,7 +218,7 @@ function testSuite() {
                Body: body,
                Metadata: { 'scal-location-constraint': awsLocation } },
            err => {
-                assert.equal(err, null, `Err putting object to AWS: ${err}`);
+                expect(err).toEqual(null);
                mpuSetup(this.test.key, gcpLocation,
                (uploadId, partArray) => {
                    const params = {
@@ -230,12 +228,12 @@ function testSuite() {
                        MultipartUpload: { Parts: partArray },
                    };
                    s3.completeMultipartUpload(params, err => {
-                        assert.equal(err, null, `Err completing MPU: ${err}`);
+                        expect(err).toEqual(null);
                        // make sure object is gone from AWS
                        setTimeout(() => {
                            this.test.awsClient.getObject({ Bucket: awsBucket,
                            Key: this.test.key }, err => {
-                                assert.strictEqual(err.code, 'NoSuchKey');
+                                expect(err.code).toBe('NoSuchKey');
                                getCheck(this.test.key, true, done);
                            });
                        }, gcpTimeout);
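Worth flagging about the two complete-MPU suites above: the converted callbacks are arrow functions, yet unchanged context lines such as `this.currentTest.key = ...` and `mpuSetup(this.test.key, ...)` still read Mocha's `this`, which an arrow function no longer binds, and the new `beforeEach` pairs elsewhere initialize `testContext` to a bare `{}` before later hooks dereference `testContext.currentTest`. A defensive sketch of setup that avoids both pitfalls (assuming Jest; names mirror the diff, values illustrative):

let testContext;

beforeEach(() => {
    // Seed the nested objects every hook and test will dereference.
    testContext = { currentTest: {}, test: {} };
    testContext.currentTest.key = `somekey-${Date.now()}`; // illustrative key
    testContext.test = testContext.currentTest; // one object serves both phases
});

test('setup and test see the same context object', () => {
    expect(testContext.test.key).toBe(testContext.currentTest.key);
});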
@@ -30,11 +30,11 @@ function mpuSetup(s3, key, location, cb) {
                Metadata: { 'scal-location-constraint': location },
            };
            s3.createMultipartUpload(params, (err, res) => {
-                assert.strictEqual(err, null, `err creating mpu: ${err}`);
+                expect(err).toBe(null);
                const uploadId = res.UploadId;
-                assert(uploadId);
-                assert.strictEqual(res.Bucket, bucket);
-                assert.strictEqual(res.Key, key);
+                expect(uploadId).toBeTruthy();
+                expect(res.Bucket).toBe(bucket);
+                expect(res.Key).toBe(key);
                next(err, uploadId);
            });
        },
@@ -47,7 +47,7 @@ function mpuSetup(s3, key, location, cb) {
                Body: data[0],
            };
            s3.uploadPart(partParams, (err, res) => {
-                assert.strictEqual(err, null, `err uploading part 1: ${err}`);
+                expect(err).toBe(null);
                partArray.push({ ETag: res.ETag, PartNumber: 1 });
                next(err, uploadId);
            });
@@ -61,7 +61,7 @@ function mpuSetup(s3, key, location, cb) {
                Body: data[1],
            };
            s3.uploadPart(partParams, (err, res) => {
-                assert.strictEqual(err, null, `err uploading part 2: ${err}`);
+                expect(err).toBe(null);
                partArray.push({ ETag: res.ETag, PartNumber: 2 });
                next(err, uploadId);
            });
@@ -81,11 +81,11 @@ function completeAndAssertMpu(s3, params, cb) {
        UploadId: uploadId,
        MultipartUpload: { Parts: partArray },
    }, (err, data) => {
-        assert.strictEqual(err, null, `Err completing MPU: ${err}`);
+        expect(err).toBe(null);
        if (expectVersionId) {
-            assert.notEqual(data.VersionId, undefined);
+            expect(data.VersionId).not.toEqual(undefined);
        } else {
-            assert.strictEqual(data.VersionId, undefined);
+            expect(data.VersionId).toBe(undefined);
        }
        const expectedVersionId = expectedGetVersionId || data.VersionId;
        getAndAssertResult(s3, { bucket, key, body: concattedData,
@@ -114,7 +114,7 @@ function testSuite() {
        });
    });

-    it('versioning not configured: should not return version id ' +
+    test('versioning not configured: should not return version id ' +
    'completing mpu', done => {
        const key = `somekey-${genUniqID()}`;
        mpuSetup(s3, key, awsLocation, (err, uploadId, partArray) => {
@@ -123,9 +123,9 @@ function testSuite() {
        });
    });

-    it('versioning not configured: if complete mpu on already-existing ' +
+    test('versioning not configured: if complete mpu on already-existing ' +
    'object, metadata should be overwritten but data of previous version' +
-    'in AWS should not be deleted', function itF(done) {
+    'in AWS should not be deleted', done => {
        const key = `somekey-${genUniqID()}`;
        async.waterfall([
            next => putToAwsBackend(s3, bucket, key, '', err => next(err)),
@@ -144,13 +144,14 @@ function testSuite() {
                expectedError: 'NoSuchKey' }, next),
            next => awsGetLatestVerId(key, '', next),
            (awsVerId, next) => {
-                assert.strictEqual(awsVerId, this.test.awsVerId);
+                expect(awsVerId).toBe(this.test.awsVerId);
                next();
            },
        ], done);
    });

-    it('versioning suspended: should not return version id completing mpu',
+    test(
+        'versioning suspended: should not return version id completing mpu',
        done => {
            const key = `somekey-${genUniqID()}`;
            async.waterfall([
@@ -160,10 +161,10 @@ function testSuite() {
                { bucket, key, uploadId, partArray, expectVersionId: false,
                    expectedGetVersionId: 'null' }, next),
            ], done);
-        });
+        }
+    );

-    it('versioning enabled: should return version id completing mpu',
-        done => {
+    test('versioning enabled: should return version id completing mpu', done => {
        const key = `somekey-${genUniqID()}`;
        async.waterfall([
            next => enableVersioning(s3, bucket, next),
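The `it('...', function itFn(done))` → `test('...', done => {...})` rewrite above relies on Jest's done-callback support for asynchronous tests. A self-contained sketch of the shape, assuming a Jest runtime (`fakeUploadPart` is an illustrative stand-in for `s3.uploadPart`):

// Stand-in for an async SDK call; illustrative only.
function fakeUploadPart(params, cb) {
    process.nextTick(() => cb(null, { ETag: '"d41d8cd98f00b204e9800998ecf8427e"' }));
}

let testContext;

beforeEach(() => {
    testContext = { test: { key: 'somekey-example' } };
});

test('uploads a part and signals completion via done', done => {
    fakeUploadPart({ Key: testContext.test.key }, (err, res) => {
        expect(err).toBe(null);
        expect(res.ETag).toBeTruthy();
        done(); // Jest waits until done() fires or the test timeout elapses
    });
});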
@@ -21,15 +21,14 @@ let s3;

function checkSubPart(key, uploadId, expectedParts, cb) {
    azureClient.listBlocks(azureContainerName, key, 'all', (err, list) => {
-        assert.equal(err, null, 'Expected success, got error ' +
-            `on call to Azure: ${err}`);
+        expect(err).toEqual(null);
        const uncommittedBlocks = list.UncommittedBlocks;
        const committedBlocks = list.CommittedBlocks;
-        assert.strictEqual(committedBlocks, undefined);
+        expect(committedBlocks).toBe(undefined);
        uncommittedBlocks.forEach((l, index) => {
-            assert.strictEqual(l.Name, getBlockId(uploadId,
+            expect(l.Name).toBe(getBlockId(uploadId,
                expectedParts[index].partnbr, expectedParts[index].subpartnbr));
-            assert.strictEqual(l.Size, expectedParts[index].size.toString());
+            expect(l.Size).toBe(expectedParts[index].size.toString());
        });
        cb();
    });
@@ -37,11 +36,11 @@ function checkSubPart(key, uploadId, expectedParts, cb) {

function azureCheck(key, cb) {
    s3.getObject({ Bucket: azureContainerName, Key: key }, (err, res) => {
-        assert.equal(err, null);
-        assert.strictEqual(res.ETag, `"${expectedMD5}"`);
+        expect(err).toEqual(null);
+        expect(res.ETag).toBe(`"${expectedMD5}"`);
        azureClient.getBlobProperties(azureContainerName, key, (err, res) => {
            const convertedMD5 = convertMD5(res.contentSettings.contentMD5);
-            assert.strictEqual(convertedMD5, expectedMD5);
+            expect(convertedMD5).toBe(expectedMD5);
            return cb();
        });
    });
@@ -51,93 +50,99 @@ describeSkipIfNotMultipleOrCeph('MultipleBackend put part to AZURE', function
describeF() {
    this.timeout(80000);
    withV4(sigCfg => {
-        beforeEach(function beforeFn() {
+        beforeEach(() => {
            this.currentTest.key = uniqName(keyObject);
            bucketUtil = new BucketUtility('default', sigCfg);
            s3 = bucketUtil.s3;
        });
        describe('with bucket location header', () => {
-            beforeEach(function beforeEachFn(done) {
+            let testContext;
+
+            beforeEach(() => {
+                testContext = {};
+            });
+
+            beforeEach(done => {
                async.waterfall([
                    next => s3.createBucket({ Bucket: azureContainerName,
                    }, err => next(err)),
                    next => s3.createMultipartUpload({
                        Bucket: azureContainerName,
-                        Key: this.currentTest.key,
+                        Key: testContext.currentTest.key,
                        Metadata: { 'scal-location-constraint': azureLocation },
                    }, (err, res) => {
                        if (err) {
                            return next(err);
                        }
-                        this.currentTest.uploadId = res.UploadId;
+                        testContext.currentTest.uploadId = res.UploadId;
                        return next();
                    }),
                ], done);
            });

-            afterEach(function afterEachFn(done) {
+            afterEach(done => {
                async.waterfall([
                    next => s3.abortMultipartUpload({
                        Bucket: azureContainerName,
-                        Key: this.currentTest.key,
-                        UploadId: this.currentTest.uploadId,
+                        Key: testContext.currentTest.key,
+                        UploadId: testContext.currentTest.uploadId,
                    }, err => next(err)),
                    next => s3.deleteBucket({ Bucket: azureContainerName },
                        err => next(err)),
                ], err => {
-                    assert.equal(err, null, `Error aborting MPU: ${err}`);
+                    expect(err).toEqual(null);
                    done();
                });
            });

-            it('should put 0-byte block to Azure', function itFn(done) {
+            test('should put 0-byte block to Azure', done => {
                const params = {
                    Bucket: azureContainerName,
-                    Key: this.test.key,
-                    UploadId: this.test.uploadId,
+                    Key: testContext.test.key,
+                    UploadId: testContext.test.uploadId,
                    PartNumber: 1,
                };
                async.waterfall([
                    next => s3.uploadPart(params, (err, res) => {
                        const eTagExpected = `"${azureMpuUtils.zeroByteETag}"`;
-                        assert.strictEqual(res.ETag, eTagExpected);
+                        expect(res.ETag).toBe(eTagExpected);
                        return next(err);
                    }),
                    next => azureClient.listBlocks(azureContainerName,
-                    this.test.key, 'all', err => {
-                        assert.notEqual(err, null,
-                            'Expected failure but got success');
-                        assert.strictEqual(err.code, 'BlobNotFound');
+                    testContext.test.key, 'all', err => {
+                        expect(err).not.toEqual(null);
+                        expect(err.code).toBe('BlobNotFound');
                        next();
                    }),
                ], done);
            });

-            it('should put 2 blocks to Azure', function itFn(done) {
+            test('should put 2 blocks to Azure', done => {
                const body = Buffer.alloc(maxSubPartSize + 10);
                const parts = [{ partnbr: 1, subpartnbr: 0,
                    size: maxSubPartSize },
                    { partnbr: 1, subpartnbr: 1, size: 10 }];
                const params = {
                    Bucket: azureContainerName,
-                    Key: this.test.key,
-                    UploadId: this.test.uploadId,
+                    Key: testContext.test.key,
+                    UploadId: testContext.test.uploadId,
                    PartNumber: 1,
                    Body: body,
                };
                async.waterfall([
                    next => s3.uploadPart(params, (err, res) => {
                        const eTagExpected = expectedETag(body);
-                        assert.strictEqual(res.ETag, eTagExpected);
+                        expect(res.ETag).toBe(eTagExpected);
                        return next(err);
                    }),
-                    next => checkSubPart(this.test.key, this.test.uploadId,
+                    next => checkSubPart(testContext.test.key, testContext.test.uploadId,
                        parts, next),
                ], done);
            });

-            it('should put 5 parts bigger than maxSubPartSize to Azure',
-            function it(done) {
+            test(
+                'should put 5 parts bigger than maxSubPartSize to Azure',
+                done => {
                const body = Buffer.alloc(maxSubPartSize + 10);
                let parts = [];
                for (let i = 1; i < 6; i++) {
@@ -150,26 +155,27 @@ describeF() {
                    const partNumber = n + 1;
                    const params = {
                        Bucket: azureContainerName,
-                        Key: this.test.key,
-                        UploadId: this.test.uploadId,
+                        Key: testContext.test.key,
+                        UploadId: testContext.test.uploadId,
                        PartNumber: partNumber,
                        Body: body,
                    };
                    s3.uploadPart(params, (err, res) => {
                        const eTagExpected = expectedETag(body);
-                        assert.strictEqual(res.ETag, eTagExpected);
+                        expect(res.ETag).toBe(eTagExpected);
                        return next(err);
                    });
                }, err => {
-                    assert.equal(err, null, 'Expected success, ' +
-                        `got error: ${err}`);
-                    checkSubPart(this.test.key, this.test.uploadId,
+                    expect(err).toEqual(null);
+                    checkSubPart(testContext.test.key, testContext.test.uploadId,
                        parts, done);
                });
-            });
+            }
+            );

-            it('should put 5 parts smaller than maxSubPartSize to Azure',
-            function it(done) {
+            test(
+                'should put 5 parts smaller than maxSubPartSize to Azure',
+                done => {
                const body = Buffer.alloc(10);
                let parts = [];
                for (let i = 1; i < 6; i++) {
@@ -181,25 +187,25 @@ describeF() {
                    const partNumber = n + 1;
                    const params = {
                        Bucket: azureContainerName,
-                        Key: this.test.key,
-                        UploadId: this.test.uploadId,
+                        Key: testContext.test.key,
+                        UploadId: testContext.test.uploadId,
                        PartNumber: partNumber,
                        Body: body,
                    };
                    s3.uploadPart(params, (err, res) => {
                        const eTagExpected = expectedETag(body);
-                        assert.strictEqual(res.ETag, eTagExpected);
+                        expect(res.ETag).toBe(eTagExpected);
                        return next(err);
                    });
                }, err => {
-                    assert.equal(err, null, 'Expected success, ' +
-                        `got error: ${err}`);
-                    checkSubPart(this.test.key, this.test.uploadId,
+                    expect(err).toEqual(null);
+                    checkSubPart(testContext.test.key, testContext.test.uploadId,
                        parts, done);
                });
-            });
+            }
+            );

-            it('should put the same part twice', function itFn(done) {
+            test('should put the same part twice', done => {
                const body1 = Buffer.alloc(maxSubPartSize + 10);
                const body2 = Buffer.alloc(20);
                const parts2 = [{ partnbr: 1, subpartnbr: 0, size: 20 },
@@ -207,30 +213,36 @@ describeF() {
                async.waterfall([
                    next => s3.uploadPart({
                        Bucket: azureContainerName,
-                        Key: this.test.key,
-                        UploadId: this.test.uploadId,
+                        Key: testContext.test.key,
+                        UploadId: testContext.test.uploadId,
                        PartNumber: 1,
                        Body: body1,
                    }, err => next(err)),
                    next => s3.uploadPart({
                        Bucket: azureContainerName,
-                        Key: this.test.key,
-                        UploadId: this.test.uploadId,
+                        Key: testContext.test.key,
+                        UploadId: testContext.test.uploadId,
                        PartNumber: 1,
                        Body: body2,
                    }, (err, res) => {
                        const eTagExpected = expectedETag(body2);
-                        assert.strictEqual(res.ETag, eTagExpected);
+                        expect(res.ETag).toBe(eTagExpected);
                        return next(err);
                    }),
-                    next => checkSubPart(this.test.key, this.test.uploadId,
+                    next => checkSubPart(testContext.test.key, testContext.test.uploadId,
                        parts2, next),
                ], done);
            });
        });

        describe('with same key as preexisting part', () => {
-            beforeEach(function beforeEachFn(done) {
+            let testContext;
+
+            beforeEach(() => {
+                testContext = {};
+            });
+
+            beforeEach(done => {
                async.waterfall([
                    next => s3.createBucket({ Bucket: azureContainerName },
                        err => next(err)),
@@ -238,45 +250,44 @@ describeF() {
                        const body = Buffer.alloc(10);
                        s3.putObject({
                            Bucket: azureContainerName,
-                            Key: this.currentTest.key,
+                            Key: testContext.currentTest.key,
                            Metadata: { 'scal-location-constraint':
                                azureLocation },
                            Body: body,
                        }, err => {
-                            assert.equal(err, null, 'Err putting object to ' +
-                                `azure: ${err}`);
+                            expect(err).toEqual(null);
                            return next();
                        });
                    },
                    next => s3.createMultipartUpload({
                        Bucket: azureContainerName,
-                        Key: this.currentTest.key,
+                        Key: testContext.currentTest.key,
                        Metadata: { 'scal-location-constraint': azureLocation },
                    }, (err, res) => {
                        if (err) {
                            return next(err);
                        }
-                        this.currentTest.uploadId = res.UploadId;
+                        testContext.currentTest.uploadId = res.UploadId;
                        return next();
                    }),
                ], done);
            });

-            afterEach(function afterEachFn(done) {
+            afterEach(done => {
                async.waterfall([
                    next => {
                        process.stdout.write('Aborting multipart upload\n');
                        s3.abortMultipartUpload({
                            Bucket: azureContainerName,
-                            Key: this.currentTest.key,
-                            UploadId: this.currentTest.uploadId },
+                            Key: testContext.currentTest.key,
+                            UploadId: testContext.currentTest.uploadId },
                            err => next(err));
                    },
                    next => {
                        process.stdout.write('Deleting object\n');
                        s3.deleteObject({
                            Bucket: azureContainerName,
-                            Key: this.currentTest.key },
+                            Key: testContext.currentTest.key },
                            err => next(err));
                    },
                    next => {
@@ -286,26 +297,27 @@ describeF() {
                            err => next(err));
                    },
                ], err => {
-                    assert.equal(err, null, `Err in afterEach: ${err}`);
+                    expect(err).toEqual(null);
                    done();
                });
            });

-            it('should put a part without overwriting existing object',
-            function itFn(done) {
+            test(
+                'should put a part without overwriting existing object',
+                done => {
                const body = Buffer.alloc(20);
                s3.uploadPart({
                    Bucket: azureContainerName,
-                    Key: this.test.key,
-                    UploadId: this.test.uploadId,
+                    Key: testContext.test.key,
+                    UploadId: testContext.test.uploadId,
                    PartNumber: 1,
                    Body: body,
                }, err => {
-                    assert.strictEqual(err, null, 'Err putting part to ' +
-                        `Azure: ${err}`);
-                    azureCheck(this.test.key, done);
+                    expect(err).toBe(null);
+                    azureCheck(testContext.test.key, done);
                });
-            });
+            }
+            );
        });
    });
});
@@ -315,53 +327,59 @@ describeSkipIfNotMultipleOrCeph('MultipleBackend put part to AZURE ' +
describeF() {
    this.timeout(80000);
    withV4(sigCfg => {
-        beforeEach(function beforeFn() {
+        beforeEach(() => {
            this.currentTest.key = uniqName(keyObject);
            bucketUtil = new BucketUtility('default', sigCfg);
            s3 = bucketUtil.s3;
        });
        describe('with bucket location header', () => {
-            beforeEach(function beforeEachFn(done) {
+            let testContext;
+
+            beforeEach(() => {
+                testContext = {};
+            });
+
+            beforeEach(done => {
                async.waterfall([
                    next => s3.createBucket({ Bucket: azureContainerName,
                    }, err => next(err)),
                    next => s3.createMultipartUpload({
                        Bucket: azureContainerName,
-                        Key: this.currentTest.key,
+                        Key: testContext.currentTest.key,
                        Metadata: { 'scal-location-constraint':
                            azureLocationMismatch },
                    }, (err, res) => {
                        if (err) {
                            return next(err);
                        }
-                        this.currentTest.uploadId = res.UploadId;
+                        testContext.currentTest.uploadId = res.UploadId;
                        return next();
                    }),
                ], done);
            });

-            afterEach(function afterEachFn(done) {
+            afterEach(done => {
                async.waterfall([
                    next => s3.abortMultipartUpload({
                        Bucket: azureContainerName,
-                        Key: this.currentTest.key,
-                        UploadId: this.currentTest.uploadId,
+                        Key: testContext.currentTest.key,
+                        UploadId: testContext.currentTest.uploadId,
                    }, err => next(err)),
                    next => s3.deleteBucket({ Bucket: azureContainerName },
                        err => next(err)),
                ], err => {
-                    assert.equal(err, null, `Error aborting MPU: ${err}`);
+                    expect(err).toEqual(null);
                    done();
                });
            });

-            it('should put block to AZURE location with bucketMatch' +
-            ' sets to false', function itFn(done) {
+            test('should put block to AZURE location with bucketMatch' +
+            ' sets to false', done => {
                const body20 = Buffer.alloc(20);
                const params = {
                    Bucket: azureContainerName,
-                    Key: this.test.key,
-                    UploadId: this.test.uploadId,
+                    Key: testContext.test.key,
+                    UploadId: testContext.test.uploadId,
                    PartNumber: 1,
                    Body: body20,
                };
@@ -371,12 +389,12 @@ describeF() {
                    next => s3.uploadPart(params, (err, res) => {
                        const eTagExpected =
                            '"441018525208457705bf09a8ee3c1093"';
-                        assert.strictEqual(res.ETag, eTagExpected);
+                        expect(res.ETag).toBe(eTagExpected);
                        return next(err);
                    }),
                    next => checkSubPart(
-                        `${azureContainerName}/${this.test.key}`,
-                        this.test.uploadId, parts, next),
+                        `${azureContainerName}/${testContext.test.key}`,
+                        testContext.test.uploadId, parts, next),
                ], done);
            });
        });
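The Azure put-part suites above all follow the same `async.waterfall` shape: upload, assert inside the callback, then verify backend state, with any error short-circuiting to `done`. A self-contained sketch of that control flow, assuming Jest and the `async` library (the step bodies are illustrative stand-ins):

const async = require('async');

test('setup, action, and verification run in sequence', done => {
    async.waterfall([
        // e.g. s3.uploadPart: pass a result forward on success.
        next => next(null, '"441018525208457705bf09a8ee3c1093"'),
        // e.g. checkSubPart: assert inside the step, then continue.
        (eTag, next) => {
            expect(eTag).toBeTruthy();
            next();
        },
    ], done); // a non-null error in any step skips straight to done(err)
});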
@@ -27,11 +27,10 @@ function checkMPUResult(bucket, key, uploadId, objCount, expected, cb) {
    gcpClient.listParts(params, (err, res) => {
        assert.ifError(err,
            `Expected success, but got err ${err}`);
-        assert((res && res.Contents &&
-            res.Contents.length === objCount));
+        expect(res && res.Contents &&
+            res.Contents.length === objCount).toBeTruthy();
        res.Contents.forEach(part => {
-            assert.strictEqual(
-                part.ETag, `"${expected}"`);
+            expect(part.ETag).toBe(`"${expected}"`);
        });
        cb();
    });
@@ -41,63 +40,69 @@ describeSkipIfNotMultipleOrCeph('MultipleBacked put part to GCP', function
describeFn() {
    this.timeout(180000);
    withV4(sigCfg => {
-        beforeEach(function beforeFn() {
+        beforeEach(() => {
            this.currentTest.key = uniqName(keyObject);
            bucketUtil = new BucketUtility('default', sigCfg);
            s3 = bucketUtil.s3;
        });

        describe('with bucket location header', () => {
-            beforeEach(function beforeEachFn(done) {
+            let testContext;
+
+            beforeEach(() => {
+                testContext = {};
+            });
+
+            beforeEach(done => {
                async.waterfall([
                    next => s3.createBucket({ Bucket: bucket,
                    }, err => next(err)),
                    next => s3.createMultipartUpload({
                        Bucket: bucket,
-                        Key: this.currentTest.key,
+                        Key: testContext.currentTest.key,
                        Metadata: { 'scal-location-constraint': gcpLocation },
                    }, (err, res) => {
                        if (err) {
                            return next(err);
                        }
-                        this.currentTest.uploadId = res.UploadId;
+                        testContext.currentTest.uploadId = res.UploadId;
                        return next();
                    }),
                ], done);
            });

-            afterEach(function afterEachFn(done) {
+            afterEach(done => {
                async.waterfall([
                    next => s3.abortMultipartUpload({
                        Bucket: bucket,
-                        Key: this.currentTest.key,
-                        UploadId: this.currentTest.uploadId,
+                        Key: testContext.currentTest.key,
+                        UploadId: testContext.currentTest.uploadId,
                    }, err => next(err)),
                    next => s3.deleteBucket({ Bucket: bucket },
                        err => next(err)),
                ], err => {
-                    assert.equal(err, null, `Error aborting MPU: ${err}`);
+                    expect(err).toEqual(null);
                    done();
                });
            });

-            it('should put 0-byte part to GCP', function itFn(done) {
+            test('should put 0-byte part to GCP', done => {
                const params = {
                    Bucket: bucket,
-                    Key: this.test.key,
-                    UploadId: this.test.uploadId,
+                    Key: testContext.test.key,
+                    UploadId: testContext.test.uploadId,
                    PartNumber: 1,
                };
                async.waterfall([
                    next => s3.uploadPart(params, (err, res) => {
                        assert.ifError(err,
                            `Expected success, but got err ${err}`);
-                        assert.strictEqual(res.ETag, `"${emptyMD5}"`);
+                        expect(res.ETag).toBe(`"${emptyMD5}"`);
                        next();
                    }),
                    next => {
                        const mpuKey =
-                            createMpuKey(this.test.key, this.test.uploadId, 1);
+                            createMpuKey(testContext.test.key, testContext.test.uploadId, 1);
                        const getParams = {
                            Bucket: gcpBucketMPU,
                            Key: mpuKey,
@@ -105,40 +110,39 @@ describeFn() {
                        gcpClient.getObject(getParams, (err, res) => {
                            assert.ifError(err,
                                `Expected success, but got err ${err}`);
-                            assert.strictEqual(res.ETag, `"${emptyMD5}"`);
+                            expect(res.ETag).toBe(`"${emptyMD5}"`);
                            next();
                        });
                    },
                ], done);
            });

-            it('should put 2 parts to GCP', function ifFn(done) {
+            test('should put 2 parts to GCP', done => {
                async.waterfall([
                    next => {
                        async.times(2, (n, cb) => {
                            const params = {
                                Bucket: bucket,
-                                Key: this.test.key,
-                                UploadId: this.test.uploadId,
+                                Key: testContext.test.key,
+                                UploadId: testContext.test.uploadId,
                                Body: body,
                                PartNumber: n + 1,
                            };
                            s3.uploadPart(params, (err, res) => {
                                assert.ifError(err,
                                    `Expected success, but got err ${err}`);
-                                assert.strictEqual(
-                                    res.ETag, `"${correctMD5}"`);
+                                expect(res.ETag).toBe(`"${correctMD5}"`);
                                cb();
                            });
                        }, () => next());
                    },
                    next => checkMPUResult(
-                        gcpBucketMPU, this.test.key, this.test.uploadId,
+                        gcpBucketMPU, testContext.test.key, testContext.test.uploadId,
                        2, correctMD5, next),
                ], done);
            });

-            it('should put the same part twice', function ifFn(done) {
+            test('should put the same part twice', done => {
                async.waterfall([
                    next => {
                        const partBody = ['', body];
@@ -146,74 +150,78 @@ describeFn() {
                        async.timesSeries(2, (n, cb) => {
                            const params = {
                                Bucket: bucket,
-                                Key: this.test.key,
-                                UploadId: this.test.uploadId,
+                                Key: testContext.test.key,
+                                UploadId: testContext.test.uploadId,
                                Body: partBody[n],
                                PartNumber: 1,
                            };
                            s3.uploadPart(params, (err, res) => {
                                assert.ifError(err,
                                    `Expected success, but got err ${err}`);
-                                assert.strictEqual(
-                                    res.ETag, `"${partMD5[n]}"`);
+                                expect(res.ETag).toBe(`"${partMD5[n]}"`);
                                cb();
                            });
                        }, () => next());
                    },
                    next => checkMPUResult(
-                        gcpBucketMPU, this.test.key, this.test.uploadId,
+                        gcpBucketMPU, testContext.test.key, testContext.test.uploadId,
                        1, correctMD5, next),
                ], done);
            });
        });

        describe('with same key as preexisting part', () => {
-            beforeEach(function beforeEachFn(done) {
+            let testContext;
+
+            beforeEach(() => {
+                testContext = {};
+            });
+
+            beforeEach(done => {
                async.waterfall([
                    next => s3.createBucket({ Bucket: bucket },
                        err => next(err)),
                    next => {
                        s3.putObject({
                            Bucket: bucket,
-                            Key: this.currentTest.key,
+                            Key: testContext.currentTest.key,
                            Metadata: {
                                'scal-location-constraint': gcpLocation },
                            Body: body,
                        }, err => {
-                            assert.equal(err, null, 'Err putting object to ' +
-                                `GCP: ${err}`);
+                            expect(err).toEqual(null);
                            return next();
                        });
                    },
                    next => s3.createMultipartUpload({
                        Bucket: bucket,
-                        Key: this.currentTest.key,
+                        Key: testContext.currentTest.key,
                        Metadata: { 'scal-location-constraint': gcpLocation },
                    }, (err, res) => {
                        if (err) {
                            return next(err);
                        }
-                        this.currentTest.uploadId = res.UploadId;
+                        testContext.currentTest.uploadId = res.UploadId;
                        return next();
                    }),
                ], done);
            });

-            afterEach(function afterEachFn(done) {
+            afterEach(done => {
                async.waterfall([
                    next => {
                        process.stdout.write('Aborting multipart upload\n');
                        s3.abortMultipartUpload({
                            Bucket: bucket,
-                            Key: this.currentTest.key,
-                            UploadId: this.currentTest.uploadId },
+                            Key: testContext.currentTest.key,
+                            UploadId: testContext.currentTest.uploadId },
                            err => next(err));
                    },
                    next => {
                        process.stdout.write('Deleting object\n');
                        s3.deleteObject({
                            Bucket: bucket,
-                            Key: this.currentTest.key },
+                            Key: testContext.currentTest.key },
                            err => next(err));
                    },
                    next => {
@@ -223,34 +231,35 @@ describeFn() {
                            err => next(err));
                    },
                ], err => {
-                    assert.equal(err, null, `Err in afterEach: ${err}`);
+                    expect(err).toEqual(null);
                    done();
                });
            });

-            it('should put a part without overwriting existing object',
-            function itFn(done) {
+            test(
+                'should put a part without overwriting existing object',
+                done => {
                const body = Buffer.alloc(20);
                s3.uploadPart({
                    Bucket: bucket,
|
||||||
Key: this.test.key,
|
Key: testContext.test.key,
|
||||||
UploadId: this.test.uploadId,
|
UploadId: testContext.test.uploadId,
|
||||||
PartNumber: 1,
|
PartNumber: 1,
|
||||||
Body: body,
|
Body: body,
|
||||||
}, err => {
|
}, err => {
|
||||||
assert.strictEqual(err, null, 'Err putting part to ' +
|
expect(err).toBe(null);
|
||||||
`GCP: ${err}`);
|
|
||||||
gcpClient.getObject({
|
gcpClient.getObject({
|
||||||
Bucket: gcpBucket,
|
Bucket: gcpBucket,
|
||||||
Key: this.test.key,
|
Key: testContext.test.key,
|
||||||
}, (err, res) => {
|
}, (err, res) => {
|
||||||
assert.ifError(err,
|
assert.ifError(err,
|
||||||
`Expected success, but got err ${err}`);
|
`Expected success, but got err ${err}`);
|
||||||
assert.strictEqual(res.ETag, `"${correctMD5}"`);
|
expect(res.ETag).toBe(`"${correctMD5}"`);
|
||||||
done();
|
done();
|
||||||
});
|
});
|
||||||
});
|
});
|
||||||
});
|
}
|
||||||
|
);
|
||||||
});
|
});
|
||||||
});
|
});
|
||||||
});
|
});
|
||||||
|
@ -260,53 +269,59 @@ describeSkipIfNotMultipleOrCeph('MultipleBackend put part to GCP location ' +
|
||||||
describeF() {
|
describeF() {
|
||||||
this.timeout(80000);
|
this.timeout(80000);
|
||||||
withV4(sigCfg => {
|
withV4(sigCfg => {
|
||||||
beforeEach(function beforeFn() {
|
beforeEach(() => {
|
||||||
this.currentTest.key = uniqName(keyObject);
|
this.currentTest.key = uniqName(keyObject);
|
||||||
bucketUtil = new BucketUtility('default', sigCfg);
|
bucketUtil = new BucketUtility('default', sigCfg);
|
||||||
s3 = bucketUtil.s3;
|
s3 = bucketUtil.s3;
|
||||||
});
|
});
|
||||||
describe('with bucket location header', () => {
|
describe('with bucket location header', () => {
|
||||||
beforeEach(function beforeEachFn(done) {
|
let testContext;
|
||||||
|
|
||||||
|
beforeEach(() => {
|
||||||
|
testContext = {};
|
||||||
|
});
|
||||||
|
|
||||||
|
beforeEach(done => {
|
||||||
async.waterfall([
|
async.waterfall([
|
||||||
next => s3.createBucket({ Bucket: bucket,
|
next => s3.createBucket({ Bucket: bucket,
|
||||||
}, err => next(err)),
|
}, err => next(err)),
|
||||||
next => s3.createMultipartUpload({
|
next => s3.createMultipartUpload({
|
||||||
Bucket: bucket,
|
Bucket: bucket,
|
||||||
Key: this.currentTest.key,
|
Key: testContext.currentTest.key,
|
||||||
Metadata: { 'scal-location-constraint':
|
Metadata: { 'scal-location-constraint':
|
||||||
gcpLocationMismatch },
|
gcpLocationMismatch },
|
||||||
}, (err, res) => {
|
}, (err, res) => {
|
||||||
if (err) {
|
if (err) {
|
||||||
return next(err);
|
return next(err);
|
||||||
}
|
}
|
||||||
this.currentTest.uploadId = res.UploadId;
|
testContext.currentTest.uploadId = res.UploadId;
|
||||||
return next();
|
return next();
|
||||||
}),
|
}),
|
||||||
], done);
|
], done);
|
||||||
});
|
});
|
||||||
|
|
||||||
afterEach(function afterEachFn(done) {
|
afterEach(done => {
|
||||||
async.waterfall([
|
async.waterfall([
|
||||||
next => s3.abortMultipartUpload({
|
next => s3.abortMultipartUpload({
|
||||||
Bucket: bucket,
|
Bucket: bucket,
|
||||||
Key: this.currentTest.key,
|
Key: testContext.currentTest.key,
|
||||||
UploadId: this.currentTest.uploadId,
|
UploadId: testContext.currentTest.uploadId,
|
||||||
}, err => next(err)),
|
}, err => next(err)),
|
||||||
next => s3.deleteBucket({ Bucket: bucket },
|
next => s3.deleteBucket({ Bucket: bucket },
|
||||||
err => next(err)),
|
err => next(err)),
|
||||||
], err => {
|
], err => {
|
||||||
assert.equal(err, null, `Error aborting MPU: ${err}`);
|
expect(err).toEqual(null);
|
||||||
done();
|
done();
|
||||||
});
|
});
|
||||||
});
|
});
|
||||||
|
|
||||||
it('should put part to GCP location with bucketMatch' +
|
test('should put part to GCP location with bucketMatch' +
|
||||||
' sets to false', function itFn(done) {
|
' sets to false', done => {
|
||||||
const body20 = Buffer.alloc(20);
|
const body20 = Buffer.alloc(20);
|
||||||
const params = {
|
const params = {
|
||||||
Bucket: bucket,
|
Bucket: bucket,
|
||||||
Key: this.test.key,
|
Key: testContext.test.key,
|
||||||
UploadId: this.test.uploadId,
|
UploadId: testContext.test.uploadId,
|
||||||
PartNumber: 1,
|
PartNumber: 1,
|
||||||
Body: body20,
|
Body: body20,
|
||||||
};
|
};
|
||||||
|
@ -314,12 +329,12 @@ describeF() {
|
||||||
'"441018525208457705bf09a8ee3c1093"';
|
'"441018525208457705bf09a8ee3c1093"';
|
||||||
async.waterfall([
|
async.waterfall([
|
||||||
next => s3.uploadPart(params, (err, res) => {
|
next => s3.uploadPart(params, (err, res) => {
|
||||||
assert.strictEqual(res.ETag, eTagExpected);
|
expect(res.ETag).toBe(eTagExpected);
|
||||||
next(err);
|
next(err);
|
||||||
}),
|
}),
|
||||||
next => {
|
next => {
|
||||||
const key =
|
const key =
|
||||||
createMpuKey(this.test.key, this.test.uploadId, 1);
|
createMpuKey(testContext.test.key, testContext.test.uploadId, 1);
|
||||||
const mpuKey = `${bucket}/${key}`;
|
const mpuKey = `${bucket}/${key}`;
|
||||||
const getParams = {
|
const getParams = {
|
||||||
Bucket: gcpBucketMPU,
|
Bucket: gcpBucketMPU,
|
||||||
|
@ -328,7 +343,7 @@ describeF() {
|
||||||
gcpClient.getObject(getParams, (err, res) => {
|
gcpClient.getObject(getParams, (err, res) => {
|
||||||
assert.ifError(err,
|
assert.ifError(err,
|
||||||
`Expected success, but got err ${err}`);
|
`Expected success, but got err ${err}`);
|
||||||
assert.strictEqual(res.ETag, eTagExpected);
|
expect(res.ETag).toBe(eTagExpected);
|
||||||
next();
|
next();
|
||||||
});
|
});
|
||||||
},
|
},
|
||||||
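The recurring rewrite in the hunks above swaps Mocha's implicit per-test context (`this.test`, `this.currentTest`) for an explicit `testContext` object, because Jest callbacks receive no runner-provided `this`. A minimal sketch of the pattern under that assumption; the key generator here is hypothetical, standing in for the suite's `uniqName(keyObject)` helper:

    describe('with same key as preexisting part', () => {
        // Jest has no per-test `this`, so the suite carries its own
        // context object and resets it before every test.
        let testContext;

        beforeEach(() => {
            testContext = {};
        });

        beforeEach(done => {
            // State that used to hang off `this.currentTest` now lives on
            // the explicit context object.
            testContext.currentTest = { key: `mpukey-${Date.now()}` }; // hypothetical key
            done();
        });

        test('reads the shared context', done => {
            expect(typeof testContext.currentTest.key).toBe('string');
            done();
        });
    });

One caveat the diff itself illustrates: every read and write has to move over together, and the context object must be populated (for example in an outer `beforeEach`) before any test dereferences `testContext.currentTest.key`.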

@@ -51,13 +51,13 @@ function putSourceObj(key, location, objSize, bucket, cb) {
        sourceParams.Body = body;
    }
    s3.putObject(sourceParams, (err, result) => {
-       assert.equal(err, null, `Error putting source object: ${err}`);
+       expect(err).toEqual(null);
        if (objSize && objSize.empty) {
-           assert.strictEqual(result.ETag, `"${emptyMD5}"`);
+           expect(result.ETag).toBe(`"${emptyMD5}"`);
        } else if (objSize && objSize.big) {
-           assert.strictEqual(result.ETag, `"${bigMD5}"`);
+           expect(result.ETag).toBe(`"${bigMD5}"`);
        } else {
-           assert.strictEqual(result.ETag, `"${normalMD5}"`);
+           expect(result.ETag).toBe(`"${normalMD5}"`);
        }
        cb();
    });
@@ -72,42 +72,40 @@ destBucket, destLoc, azureKey, mdDirective, objSize, callback) {
        cb => s3.getObject(destGetParams, cb),
        cb => azureClient.getBlobProperties(azureContainerName, azureKey, cb),
    ], (err, results) => {
-       assert.equal(err, null, `Error in assertGetObjects: ${err}`);
+       expect(err).toEqual(null);
        const [sourceRes, destRes, azureRes] = results;
        const convertedMD5 = convertMD5(azureRes[0].contentSettings.contentMD5);
        if (objSize && objSize.empty) {
-           assert.strictEqual(sourceRes.ETag, `"${emptyMD5}"`);
-           assert.strictEqual(destRes.ETag, `"${emptyMD5}"`);
-           assert.strictEqual(convertedMD5, `${emptyMD5}`);
-           assert.strictEqual('0', azureRes[0].contentLength);
+           expect(sourceRes.ETag).toBe(`"${emptyMD5}"`);
+           expect(destRes.ETag).toBe(`"${emptyMD5}"`);
+           expect(convertedMD5).toBe(`${emptyMD5}`);
+           expect('0').toBe(azureRes[0].contentLength);
        } else if (objSize && objSize.big) {
-           assert.strictEqual(sourceRes.ETag, `"${bigMD5}"`);
-           assert.strictEqual(destRes.ETag, `"${bigMD5}"`);
+           expect(sourceRes.ETag).toBe(`"${bigMD5}"`);
+           expect(destRes.ETag).toBe(`"${bigMD5}"`);
            if (process.env.ENABLE_KMS_ENCRYPTION === 'true') {
-               assert.strictEqual(sourceRes.ServerSideEncryption, 'AES256');
-               assert.strictEqual(destRes.ServerSideEncryption, 'AES256');
+               expect(sourceRes.ServerSideEncryption).toBe('AES256');
+               expect(destRes.ServerSideEncryption).toBe('AES256');
            } else {
-               assert.strictEqual(convertedMD5, `${bigMD5}`);
+               expect(convertedMD5).toBe(`${bigMD5}`);
            }
        } else {
            if (process.env.ENABLE_KMS_ENCRYPTION === 'true') {
-               assert.strictEqual(sourceRes.ServerSideEncryption, 'AES256');
-               assert.strictEqual(destRes.ServerSideEncryption, 'AES256');
+               expect(sourceRes.ServerSideEncryption).toBe('AES256');
+               expect(destRes.ServerSideEncryption).toBe('AES256');
            } else {
-               assert.strictEqual(sourceRes.ETag, `"${normalMD5}"`);
-               assert.strictEqual(destRes.ETag, `"${normalMD5}"`);
-               assert.strictEqual(convertedMD5, `${normalMD5}`);
+               expect(sourceRes.ETag).toBe(`"${normalMD5}"`);
+               expect(destRes.ETag).toBe(`"${normalMD5}"`);
+               expect(convertedMD5).toBe(`${normalMD5}`);
            }
        }
        if (mdDirective === 'COPY') {
-           assert.strictEqual(sourceRes.Metadata['test-header'],
-               destRes.Metadata['test-header']);
-           assert.strictEqual(azureRes[0].metadata.test_header,
-               destRes.Metadata['test-header']);
+           expect(sourceRes.Metadata['test-header']).toBe(destRes.Metadata['test-header']);
+           expect(azureRes[0].metadata.test_header).toBe(destRes.Metadata['test-header']);
        }
-       assert.strictEqual(sourceRes.ContentLength, destRes.ContentLength);
-       assert.strictEqual(sourceRes.Metadata[locMetaHeader], sourceLoc);
-       assert.strictEqual(destRes.Metadata[locMetaHeader], destLoc);
+       expect(sourceRes.ContentLength).toBe(destRes.ContentLength);
+       expect(sourceRes.Metadata[locMetaHeader]).toBe(sourceLoc);
+       expect(destRes.Metadata[locMetaHeader]).toBe(destLoc);
        callback();
    });
}
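The assertion rewrites above, and throughout the rest of this diff, follow a small mechanical mapping from Node's `assert` module to Jest matchers. A runnable sketch of the correspondences, using a hypothetical ETag value:

    const assert = require('assert');

    test('assert-to-expect equivalences', () => {
        const etag = '"1b2cf535f27731c974343645a3985328"'; // hypothetical value

        assert.strictEqual(etag, etag); // old: Object.is comparison
        expect(etag).toBe(etag);        // new equivalent

        assert.equal(null, null);       // old loose check, used for errors
        expect(null).toEqual(null);     // new equivalent

        const meta = { 'test-header': 'x' };
        assert.deepStrictEqual(meta, { 'test-header': 'x' }); // old deep check
        expect(meta).toEqual({ 'test-header': 'x' });         // new equivalent
    });

Two side effects of the conversion are worth noting: the custom failure messages (`'Expected success but got...'`) are dropped, since Jest prints its own diff on failure, and a line such as `expect('0').toBe(azureRes[0].contentLength)` inherits the original's reversed actual/expected order, which only affects how a failure reads. The `assert.ifError(err, ...)` calls are left untouched throughout.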

@@ -116,7 +114,7 @@ describeSkipIfNotMultipleOrCeph('MultipleBackend object copy: Azure',
function testSuite() {
    this.timeout(250000);
    withV4(sigCfg => {
-       beforeEach(function beFn() {
+       beforeEach(() => {
            this.currentTest.key = `azureputkey-${genUniqID()}`;
            this.currentTest.copyKey = `azurecopyKey-${genUniqID()}`;
            bucketUtil = new BucketUtility('default', sigCfg);
@@ -159,7 +157,7 @@ function testSuite() {
        });
    });

-   it('should copy an object from mem to Azure', function itFn(done) {
+   test('should copy an object from mem to Azure', done => {
        putSourceObj(this.test.key, memLocation, null, bucket, () => {
            const copyParams = {
                Bucket: bucket,
@@ -169,10 +167,8 @@ function testSuite() {
                Metadata: { 'scal-location-constraint': azureLocation },
            };
            s3.copyObject(copyParams, (err, result) => {
-               assert.equal(err, null, 'Expected success but got ' +
-                   `error: ${err}`);
-               assert.strictEqual(result.CopyObjectResult.ETag,
-                   `"${normalMD5}"`);
+               expect(err).toEqual(null);
+               expect(result.CopyObjectResult.ETag).toBe(`"${normalMD5}"`);
                assertGetObjects(this.test.key, bucket, memLocation,
                    this.test.copyKey, bucket, azureLocation,
                    this.test.copyKey, 'REPLACE', null, done);
@@ -180,8 +176,9 @@ function testSuite() {
        });
    });

-   it('should copy an object with no location contraint from mem to Azure',
-   function itFn(done) {
+   test(
+       'should copy an object with no location contraint from mem to Azure',
+       done => {
        putSourceObj(this.test.key, null, null, bucket, () => {
            const copyParams = {
                Bucket: bucketAzure,
@@ -190,18 +187,17 @@ function testSuite() {
                MetadataDirective: 'COPY',
            };
            s3.copyObject(copyParams, (err, result) => {
-               assert.equal(err, null, 'Expected success but got ' +
-                   `error: ${err}`);
-               assert.strictEqual(result.CopyObjectResult.ETag,
-                   `"${normalMD5}"`);
+               expect(err).toEqual(null);
+               expect(result.CopyObjectResult.ETag).toBe(`"${normalMD5}"`);
                assertGetObjects(this.test.key, bucket, undefined,
                    this.test.copyKey, bucketAzure, undefined,
                    this.test.copyKey, 'COPY', null, done);
            });
        });
-   });
+   }
+   );

-   it('should copy an object from Azure to mem', function itFn(done) {
+   test('should copy an object from Azure to mem', done => {
        putSourceObj(this.test.key, azureLocation, null, bucket, () => {
            const copyParams = {
                Bucket: bucket,
@@ -211,10 +207,8 @@ function testSuite() {
                Metadata: { 'scal-location-constraint': memLocation },
            };
            s3.copyObject(copyParams, (err, result) => {
-               assert.equal(err, null, 'Expected success but got ' +
-                   `error: ${err}`);
-               assert.strictEqual(result.CopyObjectResult.ETag,
-                   `"${normalMD5}"`);
+               expect(err).toEqual(null);
+               expect(result.CopyObjectResult.ETag).toBe(`"${normalMD5}"`);
                assertGetObjects(this.test.key, bucket, azureLocation,
                    this.test.copyKey, bucket, memLocation, this.test.key,
                    'REPLACE', null, done);
@@ -222,7 +216,7 @@ function testSuite() {
        });
    });

-   it('should copy an object from AWS to Azure', function itFn(done) {
+   test('should copy an object from AWS to Azure', done => {
        putSourceObj(this.test.key, awsLocation, null, bucket, () => {
            const copyParams = {
                Bucket: bucket,
@@ -232,10 +226,8 @@ function testSuite() {
                Metadata: { 'scal-location-constraint': azureLocation },
            };
            s3.copyObject(copyParams, (err, result) => {
-               assert.equal(err, null, 'Expected success but got ' +
-                   `error: ${err}`);
-               assert.strictEqual(result.CopyObjectResult.ETag,
-                   `"${normalMD5}"`);
+               expect(err).toEqual(null);
+               expect(result.CopyObjectResult.ETag).toBe(`"${normalMD5}"`);
                assertGetObjects(this.test.key, bucket, awsLocation,
                    this.test.copyKey, bucket, azureLocation,
                    this.test.copyKey, 'REPLACE', null, done);
@@ -243,7 +235,7 @@ function testSuite() {
        });
    });

-   it('should copy an object from Azure to AWS', function itFn(done) {
+   test('should copy an object from Azure to AWS', done => {
        putSourceObj(this.test.key, azureLocation, null, bucket, () => {
            const copyParams = {
                Bucket: bucket,
@@ -253,10 +245,8 @@ function testSuite() {
                Metadata: { 'scal-location-constraint': awsLocation },
            };
            s3.copyObject(copyParams, (err, result) => {
-               assert.equal(err, null, 'Expected success but got ' +
-                   `error: ${err}`);
-               assert.strictEqual(result.CopyObjectResult.ETag,
-                   `"${normalMD5}"`);
+               expect(err).toEqual(null);
+               expect(result.CopyObjectResult.ETag).toBe(`"${normalMD5}"`);
                assertGetObjects(this.test.key, bucket, azureLocation,
                    this.test.copyKey, bucket, awsLocation, this.test.key,
                    'REPLACE', null, done);
@@ -264,8 +254,8 @@ function testSuite() {
        });
    });

-   it('should copy an object from Azure to mem with "REPLACE" directive ' +
-   'and no location constraint md', function itFn(done) {
+   test('should copy an object from Azure to mem with "REPLACE" directive ' +
+   'and no location constraint md', done => {
        putSourceObj(this.test.key, azureLocation, null, bucket, () => {
            const copyParams = {
                Bucket: bucket,
@@ -274,10 +264,8 @@ function testSuite() {
                MetadataDirective: 'REPLACE',
            };
            s3.copyObject(copyParams, (err, result) => {
-               assert.equal(err, null, 'Expected success but got ' +
-                   `error: ${err}`);
-               assert.strictEqual(result.CopyObjectResult.ETag,
-                   `"${normalMD5}"`);
+               expect(err).toEqual(null);
+               expect(result.CopyObjectResult.ETag).toBe(`"${normalMD5}"`);
                assertGetObjects(this.test.key, bucket, azureLocation,
                    this.test.copyKey, bucket, undefined, this.test.key,
                    'REPLACE', null, done);
@@ -285,8 +273,8 @@ function testSuite() {
        });
    });

-   it('should copy an object from mem to Azure with "REPLACE" directive ' +
-   'and no location constraint md', function itFn(done) {
+   test('should copy an object from mem to Azure with "REPLACE" directive ' +
+   'and no location constraint md', done => {
        putSourceObj(this.test.key, null, null, bucket, () => {
            const copyParams = {
                Bucket: bucketAzure,
@@ -295,10 +283,8 @@ function testSuite() {
                MetadataDirective: 'REPLACE',
            };
            s3.copyObject(copyParams, (err, result) => {
-               assert.equal(err, null, 'Expected success but got ' +
-                   `error: ${err}`);
-               assert.strictEqual(result.CopyObjectResult.ETag,
-                   `"${normalMD5}"`);
+               expect(err).toEqual(null);
+               expect(result.CopyObjectResult.ETag).toBe(`"${normalMD5}"`);
                assertGetObjects(this.test.key, bucket, undefined,
                    this.test.copyKey, bucketAzure, undefined,
                    this.test.copyKey, 'REPLACE', null, done);
@@ -306,9 +292,8 @@ function testSuite() {
        });
    });

-   it('should copy an object from Azure to Azure showing sending ' +
-   'metadata location constraint this doesn\'t matter with COPY directive',
-   function itFn(done) {
+   test('should copy an object from Azure to Azure showing sending ' +
+   'metadata location constraint this doesn\'t matter with COPY directive', done => {
        putSourceObj(this.test.key, azureLocation, null, bucketAzure,
        () => {
            const copyParams = {
@@ -319,10 +304,8 @@ function testSuite() {
                Metadata: { 'scal-location-constraint': memLocation },
            };
            s3.copyObject(copyParams, (err, result) => {
-               assert.equal(err, null, 'Expected success but got ' +
-                   `error: ${err}`);
-               assert.strictEqual(result.CopyObjectResult.ETag,
-                   `"${normalMD5}"`);
+               expect(err).toEqual(null);
+               expect(result.CopyObjectResult.ETag).toBe(`"${normalMD5}"`);
                assertGetObjects(this.test.key, bucketAzure, azureLocation,
                    this.test.copyKey, bucketAzure, azureLocation,
                    this.test.copyKey, 'COPY', null, done);
@@ -330,9 +313,8 @@ function testSuite() {
        });
    });

-   it('should copy an object with no location constraint from Azure to ' +
-   'Azure relying on the bucket location constraint',
-   function itFn(done) {
+   test('should copy an object with no location constraint from Azure to ' +
+   'Azure relying on the bucket location constraint', done => {
        putSourceObj(this.test.key, null, null, bucketAzure,
        () => {
            const copyParams = {
@@ -342,10 +324,8 @@ function testSuite() {
                MetadataDirective: 'COPY',
            };
            s3.copyObject(copyParams, (err, result) => {
-               assert.equal(err, null, 'Expected success but got ' +
-                   `error: ${err}`);
-               assert.strictEqual(result.CopyObjectResult.ETag,
-                   `"${normalMD5}"`);
+               expect(err).toEqual(null);
+               expect(result.CopyObjectResult.ETag).toBe(`"${normalMD5}"`);
                assertGetObjects(this.test.key, bucketAzure, undefined,
                    this.test.copyKey, bucketAzure, undefined,
                    this.test.copyKey, 'COPY', null, done);
@@ -353,8 +333,8 @@ function testSuite() {
        });
    });

-   it('should copy an object from Azure to mem because bucket ' +
-   'destination location is mem', function itFn(done) {
+   test('should copy an object from Azure to mem because bucket ' +
+   'destination location is mem', done => {
        putSourceObj(this.test.key, azureLocation, null, bucket, () => {
            const copyParams = {
                Bucket: bucket,
@@ -363,10 +343,8 @@ function testSuite() {
                MetadataDirective: 'COPY',
            };
            s3.copyObject(copyParams, (err, result) => {
-               assert.equal(err, null, 'Expected success but got ' +
-                   `error: ${err}`);
-               assert.strictEqual(result.CopyObjectResult.ETag,
-                   `"${normalMD5}"`);
+               expect(err).toEqual(null);
+               expect(result.CopyObjectResult.ETag).toBe(`"${normalMD5}"`);
                assertGetObjects(this.test.key, bucket, azureLocation,
                    this.test.copyKey, bucket, memLocation,
                    this.test.key, 'COPY', null, done);
@@ -374,9 +352,8 @@ function testSuite() {
        });
    });

-   it('should copy an object on Azure to a different Azure ' +
-   'account without source object READ access',
-   function itFn(done) {
+   test('should copy an object on Azure to a different Azure ' +
+   'account without source object READ access', done => {
        putSourceObj(this.test.key, azureLocation2, null, bucket, () => {
            const copyParams = {
                Bucket: bucket,
@@ -386,10 +363,8 @@ function testSuite() {
                Metadata: { 'scal-location-constraint': azureLocation },
            };
            s3.copyObject(copyParams, (err, result) => {
-               assert.equal(err, null, 'Expected success but got ' +
-                   `error: ${err}`);
-               assert.strictEqual(result.CopyObjectResult.ETag,
-                   `"${normalMD5}"`);
+               expect(err).toEqual(null);
+               expect(result.CopyObjectResult.ETag).toBe(`"${normalMD5}"`);
                assertGetObjects(this.test.key, bucket, azureLocation2,
                    this.test.copyKey, bucket, azureLocation,
                    this.test.copyKey, 'REPLACE', null, done);
@@ -397,9 +372,8 @@ function testSuite() {
        });
    });

-   it('should copy a 5MB object on Azure to a different Azure ' +
-   'account without source object READ access',
-   function itFn(done) {
+   test('should copy a 5MB object on Azure to a different Azure ' +
+   'account without source object READ access', done => {
        putSourceObj(this.test.key, azureLocation2, { big: true }, bucket,
        () => {
            const copyParams = {
@@ -410,10 +384,8 @@ function testSuite() {
                Metadata: { 'scal-location-constraint': azureLocation },
            };
            s3.copyObject(copyParams, (err, result) => {
-               assert.equal(err, null, 'Expected success but got ' +
-                   `error: ${err}`);
-               assert.strictEqual(result.CopyObjectResult.ETag,
-                   `"${bigMD5}"`);
+               expect(err).toEqual(null);
+               expect(result.CopyObjectResult.ETag).toBe(`"${bigMD5}"`);
                assertGetObjects(this.test.key, bucket, azureLocation2,
                    this.test.copyKey, bucket, azureLocation,
                    this.test.copyKey, 'REPLACE', { big: true }, done);
@@ -421,9 +393,8 @@ function testSuite() {
        });
    });

-   it('should copy an object from bucketmatch=false ' +
-   'Azure location to MPU with a bucketmatch=false Azure location',
-   function itFn(done) {
+   test('should copy an object from bucketmatch=false ' +
+   'Azure location to MPU with a bucketmatch=false Azure location', done => {
        putSourceObj(this.test.key, azureLocationMismatch, null, bucket,
        () => {
            const copyParams = {
@@ -435,10 +406,8 @@ function testSuite() {
                    azureLocationMismatch },
            };
            s3.copyObject(copyParams, (err, result) => {
-               assert.equal(err, null, 'Expected success but got ' +
-                   `error: ${err}`);
-               assert.strictEqual(result.CopyObjectResult.ETag,
-                   `"${normalMD5}"`);
+               expect(err).toEqual(null);
+               expect(result.CopyObjectResult.ETag).toBe(`"${normalMD5}"`);
                assertGetObjects(this.test.key, bucket,
                    azureLocationMismatch,
                    this.test.copyKey, bucket, azureLocationMismatch,
@@ -448,9 +417,8 @@ function testSuite() {
        });
    });

-   it('should copy an object from bucketmatch=false ' +
-   'Azure location to MPU with a bucketmatch=true Azure location',
-   function itFn(done) {
+   test('should copy an object from bucketmatch=false ' +
+   'Azure location to MPU with a bucketmatch=true Azure location', done => {
        putSourceObj(this.test.key, azureLocationMismatch, null, bucket,
        () => {
            const copyParams = {
@@ -461,10 +429,8 @@ function testSuite() {
                Metadata: { 'scal-location-constraint': azureLocation },
            };
            s3.copyObject(copyParams, (err, result) => {
-               assert.equal(err, null, 'Expected success but got ' +
-                   `error: ${err}`);
-               assert.strictEqual(result.CopyObjectResult.ETag,
-                   `"${normalMD5}"`);
+               expect(err).toEqual(null);
+               expect(result.CopyObjectResult.ETag).toBe(`"${normalMD5}"`);
                assertGetObjects(this.test.key, bucket,
                    azureLocationMismatch,
                    this.test.copyKey, bucket, azureLocation,
@@ -473,9 +439,8 @@ function testSuite() {
        });
    });

-   it('should copy an object from bucketmatch=true ' +
-   'Azure location to MPU with a bucketmatch=false Azure location',
-   function itFn(done) {
+   test('should copy an object from bucketmatch=true ' +
+   'Azure location to MPU with a bucketmatch=false Azure location', done => {
        putSourceObj(this.test.key, azureLocation, null, bucket, () => {
            const copyParams = {
                Bucket: bucket,
@@ -486,10 +451,8 @@ function testSuite() {
                    azureLocationMismatch },
            };
            s3.copyObject(copyParams, (err, result) => {
-               assert.equal(err, null, 'Expected success but got ' +
-                   `error: ${err}`);
-               assert.strictEqual(result.CopyObjectResult.ETag,
-                   `"${normalMD5}"`);
+               expect(err).toEqual(null);
+               expect(result.CopyObjectResult.ETag).toBe(`"${normalMD5}"`);
                assertGetObjects(this.test.key, bucket,
                    azureLocation,
                    this.test.copyKey, bucket, azureLocationMismatch,
@@ -499,8 +462,7 @@ function testSuite() {
        });
    });

-   it('should copy a 0-byte object from mem to Azure',
-   function itFn(done) {
+   test('should copy a 0-byte object from mem to Azure', done => {
        putSourceObj(this.test.key, memLocation, { empty: true }, bucket,
        () => {
            const copyParams = {
@@ -511,10 +473,8 @@ function testSuite() {
                Metadata: { 'scal-location-constraint': azureLocation },
            };
            s3.copyObject(copyParams, (err, result) => {
-               assert.equal(err, null, 'Expected success but got ' +
-                   `error: ${err}`);
-               assert.strictEqual(result.CopyObjectResult.ETag,
-                   `"${emptyMD5}"`);
+               expect(err).toEqual(null);
+               expect(result.CopyObjectResult.ETag).toBe(`"${emptyMD5}"`);
                assertGetObjects(this.test.key, bucket, memLocation,
                    this.test.copyKey, bucket, azureLocation,
                    this.test.copyKey, 'REPLACE', { empty: true }, done);
@@ -522,7 +482,7 @@ function testSuite() {
        });
    });

-   it('should copy a 0-byte object on Azure', function itFn(done) {
+   test('should copy a 0-byte object on Azure', done => {
        putSourceObj(this.test.key, azureLocation, { empty: true }, bucket,
        () => {
            const copyParams = {
@@ -533,10 +493,8 @@ function testSuite() {
                Metadata: { 'scal-location-constraint': azureLocation },
            };
            s3.copyObject(copyParams, (err, result) => {
-               assert.equal(err, null, 'Expected success but got ' +
-                   `error: ${err}`);
-               assert.strictEqual(result.CopyObjectResult.ETag,
-                   `"${emptyMD5}"`);
+               expect(err).toEqual(null);
+               expect(result.CopyObjectResult.ETag).toBe(`"${emptyMD5}"`);
                assertGetObjects(this.test.key, bucket, azureLocation,
                    this.test.copyKey, bucket, azureLocation,
                    this.test.copyKey, 'REPLACE', { empty: true }, done);
@@ -544,7 +502,7 @@ function testSuite() {
        });
    });

-   it('should copy a 5MB object from mem to Azure', function itFn(done) {
+   test('should copy a 5MB object from mem to Azure', done => {
        putSourceObj(this.test.key, memLocation, { big: true }, bucket,
        () => {
            const copyParams = {
@@ -555,9 +513,8 @@ function testSuite() {
                Metadata: { 'scal-location-constraint': azureLocation },
            };
            s3.copyObject(copyParams, (err, result) => {
-               assert.equal(err, null, `Err copying object: ${err}`);
-               assert.strictEqual(result.CopyObjectResult.ETag,
-                   `"${bigMD5}"`);
+               expect(err).toEqual(null);
+               expect(result.CopyObjectResult.ETag).toBe(`"${bigMD5}"`);
                setTimeout(() => {
                    assertGetObjects(this.test.key, bucket, memLocation,
                        this.test.copyKey, bucket, azureLocation,
@@ -567,7 +524,7 @@ function testSuite() {
        });
    });
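The two 5MB copy tests differ from the rest in that they wrap the read-back assertions in a `setTimeout`, presumably to give the large copy time to settle on the backend before `assertGetObjects` fetches it. A self-contained sketch of that shape, with a hypothetical slow backend call standing in for `s3.copyObject`:

    // slowCopy is hypothetical: it models a backend that acknowledges the
    // copy before the destination object is fully readable.
    function slowCopy(cb) {
        setTimeout(() => cb(null, { CopyObjectResult: { ETag: '"abc"' } }), 50);
    }

    test('asserts only after the backend settles', done => {
        slowCopy((err, result) => {
            expect(err).toEqual(null);
            expect(result.CopyObjectResult.ETag).toBe('"abc"');
            // Fixed delay before the read-back, mirroring the 5MB tests above.
            setTimeout(() => done(), 100);
        });
    });

A fixed delay keeps the test simple at the cost of flakiness on slow runs; polling with a deadline would be the more robust variant, but the diff keeps the original `setTimeout` approach.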

-   it('should copy a 5MB object on Azure', function itFn(done) {
+   test('should copy a 5MB object on Azure', done => {
        putSourceObj(this.test.key, azureLocation, { big: true }, bucket,
        () => {
            const copyParams = {
@@ -578,9 +535,8 @@ function testSuite() {
                Metadata: { 'scal-location-constraint': azureLocation },
            };
            s3.copyObject(copyParams, (err, result) => {
-               assert.equal(err, null, `Err copying object: ${err}`);
-               assert.strictEqual(result.CopyObjectResult.ETag,
-                   `"${bigMD5}"`);
+               expect(err).toEqual(null);
+               expect(result.CopyObjectResult.ETag).toBe(`"${bigMD5}"`);
                setTimeout(() => {
                    assertGetObjects(this.test.key, bucket, azureLocation,
                        this.test.copyKey, bucket, azureLocation,
@@ -590,14 +546,13 @@ function testSuite() {
        });
    });

-   it('should return error if Azure source object has ' +
-   'been deleted', function itFn(done) {
+   test('should return error if Azure source object has ' +
+   'been deleted', done => {
        putSourceObj(this.test.key, azureLocation, null, bucket,
        () => {
            azureClient.deleteBlob(azureContainerName, this.test.key,
            err => {
-               assert.equal(err, null, 'Error deleting object from ' +
-                   `Azure: ${err}`);
+               expect(err).toEqual(null);
                const copyParams = {
                    Bucket: bucket,
                    Key: this.test.copyKey,
@@ -605,7 +560,7 @@ function testSuite() {
                    MetadataDirective: 'COPY',
                };
                s3.copyObject(copyParams, err => {
-                   assert.strictEqual(err.code, 'ServiceUnavailable');
+                   expect(err.code).toBe('ServiceUnavailable');
                    done();
                });
            });
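Across all three files the test declarations themselves follow one rule: Mocha's named `function itFn(done)` callbacks become arrow functions passed to Jest's `test()`. The `function` form existed so Mocha could bind its per-test context; once state lives in closures, arrows are the natural fit. A minimal before/after sketch, with a hypothetical key generator:

    // Before (Mocha): a `function` callback so the runner can bind `this`.
    // it('should copy an object', function itFn(done) {
    //     doCopy(this.test.key, done);
    // });

    // After (Jest): no runner-provided `this`; state comes from the closure.
    let key;
    beforeEach(() => {
        key = `copyKey-${Date.now()}`; // hypothetical stand-in for genUniqID()
    });

    test('should copy an object', done => {
        expect(typeof key).toBe('string');
        done();
    });

Note that several converted tests in this diff still read `this.test.key` inside the new arrow callbacks; under Jest those lookups no longer resolve to per-test state, so they would need the same `testContext` treatment applied elsewhere in the change.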
@@ -38,11 +38,11 @@ function putSourceObj(location, isEmptyObj, bucket, cb) {
    }
    process.stdout.write('Putting source object\n');
    s3.putObject(sourceParams, (err, result) => {
-       assert.equal(err, null, `Error putting source object: ${err}`);
+       expect(err).toEqual(null);
        if (isEmptyObj) {
-           assert.strictEqual(result.ETag, `"${emptyMD5}"`);
+           expect(result.ETag).toBe(`"${emptyMD5}"`);
        } else {
-           assert.strictEqual(result.ETag, `"${correctMD5}"`);
+           expect(result.ETag).toBe(`"${correctMD5}"`);
        }
        cb(key);
    });
@@ -61,47 +61,45 @@ callback) {
        cb => s3.getObject(destGetParams, cb),
        cb => awsS3.getObject(awsParams, cb),
    ], (err, results) => {
-       assert.equal(err, null, `Error in assertGetObjects: ${err}`);
+       expect(err).toEqual(null);
        const [sourceRes, destRes, awsRes] = results;
        if (isEmptyObj) {
-           assert.strictEqual(sourceRes.ETag, `"${emptyMD5}"`);
-           assert.strictEqual(destRes.ETag, `"${emptyMD5}"`);
-           assert.strictEqual(awsRes.ETag, `"${emptyMD5}"`);
+           expect(sourceRes.ETag).toBe(`"${emptyMD5}"`);
+           expect(destRes.ETag).toBe(`"${emptyMD5}"`);
+           expect(awsRes.ETag).toBe(`"${emptyMD5}"`);
        } else if (process.env.ENABLE_KMS_ENCRYPTION === 'true') {
-           assert.strictEqual(sourceRes.ServerSideEncryption, 'AES256');
-           assert.strictEqual(destRes.ServerSideEncryption, 'AES256');
+           expect(sourceRes.ServerSideEncryption).toBe('AES256');
+           expect(destRes.ServerSideEncryption).toBe('AES256');
        } else {
-           assert.strictEqual(sourceRes.ETag, `"${correctMD5}"`);
-           assert.strictEqual(destRes.ETag, `"${correctMD5}"`);
+           expect(sourceRes.ETag).toBe(`"${correctMD5}"`);
+           expect(destRes.ETag).toBe(`"${correctMD5}"`);
            assert.deepStrictEqual(sourceRes.Body, destRes.Body);
-           assert.strictEqual(awsRes.ETag, `"${correctMD5}"`);
+           expect(awsRes.ETag).toBe(`"${correctMD5}"`);
            assert.deepStrictEqual(sourceRes.Body, awsRes.Body);
        }
        if (destLoc === awsLocationEncryption) {
-           assert.strictEqual(awsRes.ServerSideEncryption, 'AES256');
+           expect(awsRes.ServerSideEncryption).toBe('AES256');
        } else {
-           assert.strictEqual(awsRes.ServerSideEncryption, undefined);
+           expect(awsRes.ServerSideEncryption).toBe(undefined);
        }
        if (mdDirective === 'COPY') {
            assert.deepStrictEqual(sourceRes.Metadata['test-header'],
                destRes.Metadata['test-header']);
        } else if (mdDirective === 'REPLACE') {
-           assert.strictEqual(destRes.Metadata['test-header'],
-               undefined);
+           expect(destRes.Metadata['test-header']).toBe(undefined);
        }
        if (destLoc === awsLocation) {
-           assert.strictEqual(awsRes.Metadata[locMetaHeader], destLoc);
+           expect(awsRes.Metadata[locMetaHeader]).toBe(destLoc);
            if (mdDirective === 'COPY') {
                assert.deepStrictEqual(sourceRes.Metadata['test-header'],
                    awsRes.Metadata['test-header']);
            } else if (mdDirective === 'REPLACE') {
-               assert.strictEqual(awsRes.Metadata['test-header'],
-                   undefined);
+               expect(awsRes.Metadata['test-header']).toBe(undefined);
            }
        }
-       assert.strictEqual(sourceRes.ContentLength, destRes.ContentLength);
-       assert.strictEqual(sourceRes.Metadata[locMetaHeader], sourceLoc);
-       assert.strictEqual(destRes.Metadata[locMetaHeader], destLoc);
+       expect(sourceRes.ContentLength).toBe(destRes.ContentLength);
+       expect(sourceRes.Metadata[locMetaHeader]).toBe(sourceLoc);
+       expect(destRes.Metadata[locMetaHeader]).toBe(destLoc);
        callback();
    });
}
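Note that this helper intentionally keeps `assert.deepStrictEqual` for the Buffer body and metadata-echo comparisons while everything else moves to `expect`; Node's `assert` module works fine inside Jest, so a partial migration like this is valid. A small runnable sketch of the two deep-equality styles side by side:

    const assert = require('assert');

    test('buffer bodies compare deeply under either API', () => {
        const a = Buffer.from('payload');
        const b = Buffer.from('payload');

        // Retained pre-Jest style, as in assertGetObjects above.
        assert.deepStrictEqual(a, b);

        // Jest's matcher equivalent, had these calls been converted too.
        expect(a).toEqual(b);
    });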

@@ -163,9 +161,8 @@ function testSuite() {
        });
    });

-   it('should copy an object from mem to AWS relying on ' +
-   'destination bucket location',
-   done => {
+   test('should copy an object from mem to AWS relying on ' +
+   'destination bucket location', done => {
        putSourceObj(memLocation, false, bucket, key => {
            const copyKey = `copyKey-${genUniqID()}`;
            const copyParams = {
@@ -176,10 +173,8 @@ function testSuite() {
            };
            process.stdout.write('Copying object\n');
            s3.copyObject(copyParams, (err, result) => {
-               assert.equal(err, null, 'Expected success but got ' +
-                   `error: ${err}`);
-               assert.strictEqual(result.CopyObjectResult.ETag,
-                   `"${correctMD5}"`);
+               expect(err).toEqual(null);
+               expect(result.CopyObjectResult.ETag).toBe(`"${correctMD5}"`);
                assertGetObjects(key, bucket, memLocation, copyKey,
                    bucketAws, awsLocation, copyKey, 'COPY', false, awsS3,
                    awsLocation, done);
@@ -187,9 +182,8 @@ function testSuite() {
        });
    });

-   it('should copy an object from Azure to AWS relying on ' +
-   'destination bucket location',
-   done => {
+   test('should copy an object from Azure to AWS relying on ' +
+   'destination bucket location', done => {
        putSourceObj(azureLocation, false, bucket, key => {
            const copyKey = `copyKey-${genUniqID()}`;
            const copyParams = {
@@ -200,10 +194,8 @@ function testSuite() {
            };
            process.stdout.write('Copying object\n');
            s3.copyObject(copyParams, (err, result) => {
-               assert.equal(err, null, 'Expected success but got ' +
-                   `error: ${err}`);
-               assert.strictEqual(result.CopyObjectResult.ETag,
-                   `"${correctMD5}"`);
+               expect(err).toEqual(null);
+               expect(result.CopyObjectResult.ETag).toBe(`"${correctMD5}"`);
                assertGetObjects(key, bucket, azureLocation, copyKey,
                    bucketAws, awsLocation, copyKey, 'COPY', false, awsS3,
                    awsLocation, done);
@@ -211,9 +203,8 @@ function testSuite() {
        });
    });

-   it('should copy an object without location contraint from mem ' +
-   'to AWS relying on destination bucket location',
-   done => {
+   test('should copy an object without location contraint from mem ' +
+   'to AWS relying on destination bucket location', done => {
        putSourceObj(null, false, bucket, key => {
            const copyKey = `copyKey-${genUniqID()}`;
            const copyParams = {
@@ -224,10 +215,8 @@ function testSuite() {
            };
            process.stdout.write('Copying object\n');
            s3.copyObject(copyParams, (err, result) => {
-               assert.equal(err, null, 'Expected success but got ' +
-                   `error: ${err}`);
-               assert.strictEqual(result.CopyObjectResult.ETag,
-                   `"${correctMD5}"`);
+               expect(err).toEqual(null);
+               expect(result.CopyObjectResult.ETag).toBe(`"${correctMD5}"`);
                assertGetObjects(key, bucket, undefined, copyKey,
                    bucketAws, undefined, copyKey, 'COPY', false, awsS3,
                    awsLocation, done);
@@ -235,9 +224,8 @@ function testSuite() {
        });
    });

-   it('should copy an object from AWS to mem relying on destination ' +
-   'bucket location',
-   done => {
+   test('should copy an object from AWS to mem relying on destination ' +
+   'bucket location', done => {
        putSourceObj(awsLocation, false, bucketAws, key => {
            const copyKey = `copyKey-${genUniqID()}`;
            const copyParams = {
@@ -248,10 +236,8 @@ function testSuite() {
            };
            process.stdout.write('Copying object\n');
            s3.copyObject(copyParams, (err, result) => {
-               assert.equal(err, null, 'Expected success but got ' +
-                   `error: ${err}`);
-               assert.strictEqual(result.CopyObjectResult.ETag,
-                   `"${correctMD5}"`);
+               expect(err).toEqual(null);
+               expect(result.CopyObjectResult.ETag).toBe(`"${correctMD5}"`);
                assertGetObjects(key, bucketAws, awsLocation, copyKey,
                    bucket, memLocation, key, 'COPY', false, awsS3,
                    awsLocation, done);
@@ -259,7 +245,7 @@ function testSuite() {
        });
    });

-   it('should copy an object from mem to AWS', done => {
+   test('should copy an object from mem to AWS', done => {
        putSourceObj(memLocation, false, bucket, key => {
            const copyKey = `copyKey-${genUniqID()}`;
            const copyParams = {
@@ -272,10 +258,8 @@ function testSuite() {
            };
            process.stdout.write('Copying object\n');
            s3.copyObject(copyParams, (err, result) => {
-               assert.equal(err, null, 'Expected success but got ' +
-                   `error: ${err}`);
-               assert.strictEqual(result.CopyObjectResult.ETag,
-                   `"${correctMD5}"`);
+               expect(err).toEqual(null);
+               expect(result.CopyObjectResult.ETag).toBe(`"${correctMD5}"`);
                assertGetObjects(key, bucket, memLocation, copyKey, bucket,
                    awsLocation, copyKey, 'REPLACE', false, awsS3,
                    awsLocation, done);
@@ -297,10 +281,8 @@ function testSuite() {
            };
            process.stdout.write('Copying object\n');
            s3.copyObject(copyParams, (err, result) => {
-               assert.equal(err, null, 'Expected success but got ' +
-                   `error: ${err}`);
-               assert.strictEqual(result.CopyObjectResult.ETag,
-                   `"${correctMD5}"`);
+               expect(err).toEqual(null);
+               expect(result.CopyObjectResult.ETag).toBe(`"${correctMD5}"`);
                assertGetObjects(key, bucket, memLocation, copyKey, bucket,
                    awsLocationEncryption, copyKey, 'REPLACE', false,
                    awsS3, awsLocation, done);
@@ -308,7 +290,7 @@ function testSuite() {
        });
    });

-   it('should copy an object from AWS to mem with encryption with ' +
+   test('should copy an object from AWS to mem with encryption with ' +
    'REPLACE directive but no location constraint', done => {
        putSourceObj(awsLocation, false, bucket, key => {
            const copyKey = `copyKey-${genUniqID()}`;
@@ -320,10 +302,8 @@ function testSuite() {
            };
            process.stdout.write('Copying object\n');
            s3.copyObject(copyParams, (err, result) => {
-               assert.equal(err, null, 'Expected success but got ' +
-                   `error: ${err}`);
-               assert.strictEqual(result.CopyObjectResult.ETag,
-                   `"${correctMD5}"`);
+               expect(err).toEqual(null);
+               expect(result.CopyObjectResult.ETag).toBe(`"${correctMD5}"`);
                assertGetObjects(key, bucket, awsLocation, copyKey, bucket,
                    undefined, key, 'REPLACE', false,
                    awsS3, awsLocation, done);
@@ -346,10 +326,8 @@ function testSuite() {
            };
            process.stdout.write('Copying object\n');
            s3.copyObject(copyParams, (err, result) => {
-               assert.equal(err, null, 'Expected success but got ' +
-                   `error: ${err}`);
-               assert.strictEqual(result.CopyObjectResult.ETag,
-                   `"${correctMD5}"`);
+               expect(err).toEqual(null);
+               expect(result.CopyObjectResult.ETag).toBe(`"${correctMD5}"`);
                assertGetObjects(key, bucket, awsLocation, copyKey, bucket,
                    awsLocationEncryption, copyKey, 'REPLACE', false, awsS3,
                    awsLocation, done);
@@ -370,10 +348,8 @@ function testSuite() {
            };
            process.stdout.write('Copying object\n');
            s3.copyObject(copyParams, (err, result) => {
-               assert.equal(err, null, 'Expected success but got ' +
-                   `error: ${err}`);
-               assert.strictEqual(result.CopyObjectResult.ETag,
-                   `"${correctMD5}"`);
+               expect(err).toEqual(null);
+               expect(result.CopyObjectResult.ETag).toBe(`"${correctMD5}"`);
                assertGetObjects(key, awsServerSideEncryptionbucket,
                    awsLocation, copyKey, awsServerSideEncryptionbucket,
                    awsLocationEncryption, copyKey, 'COPY',
@@ -382,7 +358,7 @@ function testSuite() {
        });
    });

-   it('should copy an object from mem to AWS with encryption with ' +
+   test('should copy an object from mem to AWS with encryption with ' +
    'REPLACE directive but no location constraint', done => {
        putSourceObj(null, false, bucket, key => {
            const copyKey = `copyKey-${genUniqID()}`;
@@ -394,10 +370,8 @@ function testSuite() {
            };
            process.stdout.write('Copying object\n');
            s3.copyObject(copyParams, (err, result) => {
-               assert.equal(err, null, 'Expected success but got ' +
-                   `error: ${err}`);
-               assert.strictEqual(result.CopyObjectResult.ETag,
-                   `"${correctMD5}"`);
+               expect(err).toEqual(null);
+               expect(result.CopyObjectResult.ETag).toBe(`"${correctMD5}"`);
                assertGetObjects(key, bucket, undefined, copyKey,
                    bucketAws, undefined, copyKey, 'REPLACE', false,
                    awsS3, awsLocation, done);
@@ -405,9 +379,8 @@ function testSuite() {
        });
    });

-   it('should copy an object from AWS to mem with "COPY" ' +
-   'directive and aws location metadata',
-   done => {
+   test('should copy an object from AWS to mem with "COPY" ' +
+   'directive and aws location metadata', done => {
        putSourceObj(awsLocation, false, bucket, key => {
            const copyKey = `copyKey-${genUniqID()}`;
            const copyParams = {
@@ -420,10 +393,8 @@ function testSuite() {
            };
            process.stdout.write('Copying object\n');
            s3.copyObject(copyParams, (err, result) => {
-               assert.equal(err, null, 'Expected success but got ' +
-                   `error: ${err}`);
-               assert.strictEqual(result.CopyObjectResult.ETag,
-                   `"${correctMD5}"`);
+               expect(err).toEqual(null);
+               expect(result.CopyObjectResult.ETag).toBe(`"${correctMD5}"`);
                assertGetObjects(key, bucket, awsLocation, copyKey, bucket,
|
||||||
memLocation, key, 'COPY', false, awsS3,
|
memLocation, key, 'COPY', false, awsS3,
|
||||||
awsLocation, done);
|
awsLocation, done);
|
||||||
|
@ -431,7 +402,7 @@ function testSuite() {
|
||||||
});
|
});
|
||||||
});
|
});
|
||||||
|
|
||||||
it('should copy an object on AWS', done => {
|
test('should copy an object on AWS', done => {
|
||||||
putSourceObj(awsLocation, false, bucket, key => {
|
putSourceObj(awsLocation, false, bucket, key => {
|
||||||
const copyKey = `copyKey-${genUniqID()}`;
|
const copyKey = `copyKey-${genUniqID()}`;
|
||||||
const copyParams = {
|
const copyParams = {
|
||||||
|
@ -443,10 +414,8 @@ function testSuite() {
|
||||||
};
|
};
|
||||||
process.stdout.write('Copying object\n');
|
process.stdout.write('Copying object\n');
|
||||||
s3.copyObject(copyParams, (err, result) => {
|
s3.copyObject(copyParams, (err, result) => {
|
||||||
assert.equal(err, null, 'Expected success but got ' +
|
expect(err).toEqual(null);
|
||||||
`error: ${err}`);
|
expect(result.CopyObjectResult.ETag).toBe(`"${correctMD5}"`);
|
||||||
assert.strictEqual(result.CopyObjectResult.ETag,
|
|
||||||
`"${correctMD5}"`);
|
|
||||||
assertGetObjects(key, bucket, awsLocation, copyKey, bucket,
|
assertGetObjects(key, bucket, awsLocation, copyKey, bucket,
|
||||||
awsLocation, copyKey, 'REPLACE', false, awsS3,
|
awsLocation, copyKey, 'REPLACE', false, awsS3,
|
||||||
awsLocation, done);
|
awsLocation, done);
|
||||||
|
@ -454,9 +423,8 @@ function testSuite() {
|
||||||
});
|
});
|
||||||
});
|
});
|
||||||
|
|
||||||
it('should copy an object on AWS location with bucketMatch equals ' +
|
test('should copy an object on AWS location with bucketMatch equals ' +
|
||||||
'false to a different AWS location with bucketMatch equals true',
|
'false to a different AWS location with bucketMatch equals true', done => {
|
||||||
done => {
|
|
||||||
putSourceObj(awsLocationMismatch, false, bucket, key => {
|
putSourceObj(awsLocationMismatch, false, bucket, key => {
|
||||||
const copyKey = `copyKey-${genUniqID()}`;
|
const copyKey = `copyKey-${genUniqID()}`;
|
||||||
const copyParams = {
|
const copyParams = {
|
||||||
|
@ -469,10 +437,8 @@ function testSuite() {
|
||||||
};
|
};
|
||||||
process.stdout.write('Copying object\n');
|
process.stdout.write('Copying object\n');
|
||||||
s3.copyObject(copyParams, (err, result) => {
|
s3.copyObject(copyParams, (err, result) => {
|
||||||
assert.equal(err, null, 'Expected success but got ' +
|
expect(err).toEqual(null);
|
||||||
`error: ${err}`);
|
expect(result.CopyObjectResult.ETag).toBe(`"${correctMD5}"`);
|
||||||
assert.strictEqual(result.CopyObjectResult.ETag,
|
|
||||||
`"${correctMD5}"`);
|
|
||||||
assertGetObjects(key, bucket, awsLocationMismatch, copyKey,
|
assertGetObjects(key, bucket, awsLocationMismatch, copyKey,
|
||||||
bucket, awsLocation, copyKey, 'REPLACE', false, awsS3,
|
bucket, awsLocation, copyKey, 'REPLACE', false, awsS3,
|
||||||
awsLocation, done);
|
awsLocation, done);
|
||||||
|
@ -480,9 +446,8 @@ function testSuite() {
|
||||||
});
|
});
|
||||||
});
|
});
|
||||||
|
|
||||||
it('should copy an object on AWS to a different AWS location ' +
|
test('should copy an object on AWS to a different AWS location ' +
|
||||||
'with source object READ access',
|
'with source object READ access', done => {
|
||||||
done => {
|
|
||||||
const awsConfig2 = getRealAwsConfig(awsLocation2);
|
const awsConfig2 = getRealAwsConfig(awsLocation2);
|
||||||
const awsS3Two = new AWS.S3(awsConfig2);
|
const awsS3Two = new AWS.S3(awsConfig2);
|
||||||
const copyKey = `copyKey-${genUniqID()}`;
|
const copyKey = `copyKey-${genUniqID()}`;
|
||||||
|
@ -506,10 +471,8 @@ function testSuite() {
|
||||||
};
|
};
|
||||||
process.stdout.write('Copying object\n');
|
process.stdout.write('Copying object\n');
|
||||||
s3.copyObject(copyParams, (err, result) => {
|
s3.copyObject(copyParams, (err, result) => {
|
||||||
assert.equal(err, null, 'Expected success ' +
|
expect(err).toEqual(null);
|
||||||
`but got error: ${err}`);
|
expect(result.CopyObjectResult.ETag).toBe(`"${correctMD5}"`);
|
||||||
assert.strictEqual(result.CopyObjectResult.ETag,
|
|
||||||
`"${correctMD5}"`);
|
|
||||||
next(err, key);
|
next(err, key);
|
||||||
});
|
});
|
||||||
},
|
},
|
||||||
|
@ -535,13 +498,13 @@ function testSuite() {
|
||||||
};
|
};
|
||||||
process.stdout.write('Copying object\n');
|
process.stdout.write('Copying object\n');
|
||||||
s3.copyObject(copyParams, err => {
|
s3.copyObject(copyParams, err => {
|
||||||
assert.strictEqual(err.code, 'AccessDenied');
|
expect(err.code).toBe('AccessDenied');
|
||||||
done();
|
done();
|
||||||
});
|
});
|
||||||
});
|
});
|
||||||
});
|
});
|
||||||
|
|
||||||
it('should copy an object on AWS with REPLACE', done => {
|
test('should copy an object on AWS with REPLACE', done => {
|
||||||
putSourceObj(awsLocation, false, bucket, key => {
|
putSourceObj(awsLocation, false, bucket, key => {
|
||||||
const copyKey = `copyKey-${genUniqID()}`;
|
const copyKey = `copyKey-${genUniqID()}`;
|
||||||
const copyParams = {
|
const copyParams = {
|
||||||
|
@ -554,10 +517,8 @@ function testSuite() {
|
||||||
};
|
};
|
||||||
process.stdout.write('Copying object\n');
|
process.stdout.write('Copying object\n');
|
||||||
s3.copyObject(copyParams, (err, result) => {
|
s3.copyObject(copyParams, (err, result) => {
|
||||||
assert.equal(err, null, 'Expected success but got ' +
|
expect(err).toEqual(null);
|
||||||
`error: ${err}`);
|
expect(result.CopyObjectResult.ETag).toBe(`"${correctMD5}"`);
|
||||||
assert.strictEqual(result.CopyObjectResult.ETag,
|
|
||||||
`"${correctMD5}"`);
|
|
||||||
assertGetObjects(key, bucket, awsLocation, copyKey, bucket,
|
assertGetObjects(key, bucket, awsLocation, copyKey, bucket,
|
||||||
awsLocation, copyKey, 'REPLACE', false, awsS3,
|
awsLocation, copyKey, 'REPLACE', false, awsS3,
|
||||||
awsLocation, done);
|
awsLocation, done);
|
||||||
|
@ -565,7 +526,7 @@ function testSuite() {
|
||||||
});
|
});
|
||||||
});
|
});
|
||||||
|
|
||||||
it('should copy a 0-byte object from mem to AWS', done => {
|
test('should copy a 0-byte object from mem to AWS', done => {
|
||||||
putSourceObj(memLocation, true, bucket, key => {
|
putSourceObj(memLocation, true, bucket, key => {
|
||||||
const copyKey = `copyKey-${genUniqID()}`;
|
const copyKey = `copyKey-${genUniqID()}`;
|
||||||
const copyParams = {
|
const copyParams = {
|
||||||
|
@ -578,10 +539,8 @@ function testSuite() {
|
||||||
};
|
};
|
||||||
process.stdout.write('Copying object\n');
|
process.stdout.write('Copying object\n');
|
||||||
s3.copyObject(copyParams, (err, result) => {
|
s3.copyObject(copyParams, (err, result) => {
|
||||||
assert.equal(err, null, 'Expected success but got ' +
|
expect(err).toEqual(null);
|
||||||
`error: ${err}`);
|
expect(result.CopyObjectResult.ETag).toBe(`"${emptyMD5}"`);
|
||||||
assert.strictEqual(result.CopyObjectResult.ETag,
|
|
||||||
`"${emptyMD5}"`);
|
|
||||||
assertGetObjects(key, bucket, memLocation, copyKey, bucket,
|
assertGetObjects(key, bucket, memLocation, copyKey, bucket,
|
||||||
awsLocation, copyKey, 'REPLACE', true, awsS3,
|
awsLocation, copyKey, 'REPLACE', true, awsS3,
|
||||||
awsLocation, done);
|
awsLocation, done);
|
||||||
|
@ -589,7 +548,7 @@ function testSuite() {
|
||||||
});
|
});
|
||||||
});
|
});
|
||||||
|
|
||||||
it('should copy a 0-byte object on AWS', done => {
|
test('should copy a 0-byte object on AWS', done => {
|
||||||
putSourceObj(awsLocation, true, bucket, key => {
|
putSourceObj(awsLocation, true, bucket, key => {
|
||||||
const copyKey = `copyKey-${genUniqID()}`;
|
const copyKey = `copyKey-${genUniqID()}`;
|
||||||
const copyParams = {
|
const copyParams = {
|
||||||
|
@ -601,10 +560,8 @@ function testSuite() {
|
||||||
};
|
};
|
||||||
process.stdout.write('Copying object\n');
|
process.stdout.write('Copying object\n');
|
||||||
s3.copyObject(copyParams, (err, result) => {
|
s3.copyObject(copyParams, (err, result) => {
|
||||||
assert.equal(err, null, 'Expected success but got ' +
|
expect(err).toEqual(null);
|
||||||
`error: ${err}`);
|
expect(result.CopyObjectResult.ETag).toBe(`"${emptyMD5}"`);
|
||||||
assert.strictEqual(result.CopyObjectResult.ETag,
|
|
||||||
`"${emptyMD5}"`);
|
|
||||||
assertGetObjects(key, bucket, awsLocation, copyKey, bucket,
|
assertGetObjects(key, bucket, awsLocation, copyKey, bucket,
|
||||||
awsLocation, copyKey, 'REPLACE', true, awsS3,
|
awsLocation, copyKey, 'REPLACE', true, awsS3,
|
||||||
awsLocation, done);
|
awsLocation, done);
|
||||||
|
@ -612,14 +569,13 @@ function testSuite() {
|
||||||
});
|
});
|
||||||
});
|
});
|
||||||
|
|
||||||
it('should return error if AWS source object has ' +
|
test('should return error if AWS source object has ' +
|
||||||
'been deleted', done => {
|
'been deleted', done => {
|
||||||
putSourceObj(awsLocation, false, bucket, key => {
|
putSourceObj(awsLocation, false, bucket, key => {
|
||||||
const awsBucket =
|
const awsBucket =
|
||||||
config.locationConstraints[awsLocation].details.bucketName;
|
config.locationConstraints[awsLocation].details.bucketName;
|
||||||
awsS3.deleteObject({ Bucket: awsBucket, Key: key }, err => {
|
awsS3.deleteObject({ Bucket: awsBucket, Key: key }, err => {
|
||||||
assert.equal(err, null, 'Error deleting object from AWS: ' +
|
expect(err).toEqual(null);
|
||||||
`${err}`);
|
|
||||||
const copyKey = `copyKey-${genUniqID()}`;
|
const copyKey = `copyKey-${genUniqID()}`;
|
||||||
const copyParams = { Bucket: bucket, Key: copyKey,
|
const copyParams = { Bucket: bucket, Key: copyKey,
|
||||||
CopySource: `/${bucket}/${key}`,
|
CopySource: `/${bucket}/${key}`,
|
||||||
|
@ -628,7 +584,7 @@ function testSuite() {
|
||||||
};
|
};
|
||||||
process.stdout.write('Copying object\n');
|
process.stdout.write('Copying object\n');
|
||||||
s3.copyObject(copyParams, err => {
|
s3.copyObject(copyParams, err => {
|
||||||
assert.strictEqual(err.code, 'ServiceUnavailable');
|
expect(err.code).toBe('ServiceUnavailable');
|
||||||
done();
|
done();
|
||||||
});
|
});
|
||||||
});
|
});
|
||||||
|
|
|
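Note on the pattern above: every conversion in these hunks applies the same mechanical mapping, in which a multi-line `assert` carrying a custom failure message collapses into a single Jest matcher and the message is dropped (Jest prints the expected and received values itself on failure). A minimal sketch of the mapping, reusing the `result` and `correctMD5` names from these tests:

    // before (Node assert under Mocha)
    assert.equal(err, null, `Expected success but got error: ${err}`);
    assert.strictEqual(result.CopyObjectResult.ETag, `"${correctMD5}"`);

    // after (Jest matchers)
    expect(err).toEqual(null);
    expect(result.CopyObjectResult.ETag).toBe(`"${correctMD5}"`);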
@@ -64,12 +64,11 @@ function putSourceObj(testParams, cb) {
         sourceParams.Body = someBody;
     }
     s3.putObject(sourceParams, (err, result) => {
-        assert.strictEqual(err, null,
-            `Error putting source object: ${err}`);
+        expect(err).toBe(null);
         if (isEmptyObj) {
-            assert.strictEqual(result.ETag, `"${emptyMD5}"`);
+            expect(result.ETag).toBe(`"${emptyMD5}"`);
         } else {
-            assert.strictEqual(result.ETag, `"${correctMD5}"`);
+            expect(result.ETag).toBe(`"${correctMD5}"`);
         }
         Object.assign(testParams, {
             sourceKey,
@@ -98,12 +97,11 @@ function copyObject(testParams, cb) {
             `${copyParams.CopySource}?versionId=null`;
     }
     s3.copyObject(copyParams, (err, data) => {
-        assert.strictEqual(err, null,
-            `Error copying object to destination: ${err}`);
+        expect(err).toBe(null);
         if (destVersioningState === 'Enabled') {
-            assert.notEqual(data.VersionId, undefined);
+            expect(data.VersionId).not.toEqual(undefined);
         } else {
-            assert.strictEqual(data.VersionId, undefined);
+            expect(data.VersionId).toBe(undefined);
         }
         const expectedBody = isEmptyObj ? '' : someBody;
         return awsGetLatestVerId(destKey, expectedBody, (err, awsVersionId) => {
@@ -145,17 +143,17 @@ function assertGetObjects(testParams, cb) {
         cb => s3.getObject(destGetParams, cb),
         cb => awsS3.getObject(awsParams, cb),
     ], (err, results) => {
-        assert.strictEqual(err, null, `Error in assertGetObjects: ${err}`);
+        expect(err).toBe(null);
         const [sourceRes, destRes, awsRes] = results;
         if (isEmptyObj) {
-            assert.strictEqual(sourceRes.ETag, `"${emptyMD5}"`);
-            assert.strictEqual(destRes.ETag, `"${emptyMD5}"`);
-            assert.strictEqual(awsRes.ETag, `"${emptyMD5}"`);
+            expect(sourceRes.ETag).toBe(`"${emptyMD5}"`);
+            expect(destRes.ETag).toBe(`"${emptyMD5}"`);
+            expect(awsRes.ETag).toBe(`"${emptyMD5}"`);
         } else {
-            assert.strictEqual(sourceRes.ETag, `"${correctMD5}"`);
-            assert.strictEqual(destRes.ETag, `"${correctMD5}"`);
+            expect(sourceRes.ETag).toBe(`"${correctMD5}"`);
+            expect(destRes.ETag).toBe(`"${correctMD5}"`);
             assert.deepStrictEqual(sourceRes.Body, destRes.Body);
-            assert.strictEqual(awsRes.ETag, `"${correctMD5}"`);
+            expect(awsRes.ETag).toBe(`"${correctMD5}"`);
             assert.deepStrictEqual(sourceRes.Body, awsRes.Body);
         }
         if (directive === 'COPY') {
@@ -166,7 +164,7 @@ function assertGetObjects(testParams, cb) {
             assert.deepStrictEqual(destRes.Metadata, {});
             assert.deepStrictEqual(awsRes.Metadata, {});
         }
-        assert.strictEqual(sourceRes.ContentLength, destRes.ContentLength);
+        expect(sourceRes.ContentLength).toBe(destRes.ContentLength);
         cb();
     });
 }
@@ -198,8 +196,7 @@ function testSuite() {
                 `in afterEach: ${err}\n`);
                 throw err;
             }
-        })
-        );
+        }));

     [{
         directive: 'REPLACE',
@@ -357,7 +354,7 @@ function testSuite() {
                     expectedError: 'NoSuchKey' }, next),
                 next => awsGetLatestVerId(testParams.destKey, someBody, next),
                 (awsVerId, next) => {
-                    assert.strictEqual(awsVerId, this.test.awsVerId);
+                    expect(awsVerId).toBe(this.test.awsVerId);
                     next();
                 },
             ], done);
@@ -389,7 +386,7 @@ function testSuite() {
         });
         const { sourceLocation, directive, isEmptyObj } = testParams;

-        it(`should copy ${isEmptyObj ? 'empty ' : ''}object from ` +
+        test(`should copy ${isEmptyObj ? 'empty ' : ''}object from ` +
         `${sourceLocation} to bucket on AWS backend with ` +
         `versioning with ${directive}`, done => {
             async.waterfall([
@@ -401,7 +398,7 @@ function testSuite() {
             ], done);
         });

-        it(`should copy ${isEmptyObj ? 'an empty ' : ''}version from ` +
+        test(`should copy ${isEmptyObj ? 'an empty ' : ''}version from ` +
         `${sourceLocation} to bucket on AWS backend with ` +
         `versioning with ${directive} directive`, done => {
             async.waterfall([
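Note on the equality flavors above: the conversion preserves the loose/strict split from `assert`. `assert.equal`/`assert.notEqual` become `toEqual`/`not.toEqual` (recursive equality), while `assert.strictEqual` becomes `toBe` (identity via `Object.is`). A short sketch using the `data.VersionId` checks from this hunk; the `toBeDefined`/`toBeUndefined` lines are an assumed, arguably more idiomatic alternative rather than what this change uses:

    expect(data.VersionId).not.toEqual(undefined); // was assert.notEqual(data.VersionId, undefined)
    expect(data.VersionId).toBe(undefined);        // was assert.strictEqual(data.VersionId, undefined)

    // equivalent built-in matchers, not used by this change:
    expect(data.VersionId).toBeDefined();
    expect(data.VersionId).toBeUndefined();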
@ -86,8 +86,7 @@ function assertCopyPart(infos, cb) {
|
||||||
Key: mpuKeyNameAzure,
|
Key: mpuKeyNameAzure,
|
||||||
UploadId: uploadId,
|
UploadId: uploadId,
|
||||||
}, (err, res) => {
|
}, (err, res) => {
|
||||||
assert.equal(err, null, 'listParts: Expected success,' +
|
expect(err).toEqual(null);
|
||||||
` got error: ${err}`);
|
|
||||||
resultCopy.Parts =
|
resultCopy.Parts =
|
||||||
[{ PartNumber: 1,
|
[{ PartNumber: 1,
|
||||||
LastModified: res.Parts[0].LastModified,
|
LastModified: res.Parts[0].LastModified,
|
||||||
|
@ -98,13 +97,11 @@ function assertCopyPart(infos, cb) {
|
||||||
}),
|
}),
|
||||||
next => azureClient.listBlocks(azureContainerName,
|
next => azureClient.listBlocks(azureContainerName,
|
||||||
mpuKeyNameAzure, 'all', (err, res) => {
|
mpuKeyNameAzure, 'all', (err, res) => {
|
||||||
assert.equal(err, null, 'listBlocks: Expected ' +
|
expect(err).toEqual(null);
|
||||||
`success, got error: ${err}`);
|
|
||||||
subPartSize.forEach((size, index) => {
|
subPartSize.forEach((size, index) => {
|
||||||
const partName = azureMpuUtils.getBlockId(uploadId, 1, index);
|
const partName = azureMpuUtils.getBlockId(uploadId, 1, index);
|
||||||
assert.strictEqual(res.UncommittedBlocks[index].Name,
|
expect(res.UncommittedBlocks[index].Name).toBe(partName);
|
||||||
partName);
|
expect(res.UncommittedBlocks[index].Size).toEqual(size);
|
||||||
assert.equal(res.UncommittedBlocks[index].Size, size);
|
|
||||||
});
|
});
|
||||||
next();
|
next();
|
||||||
}),
|
}),
|
||||||
|
@ -137,36 +134,42 @@ describeSkipIfNotMultipleOrCeph('Put Copy Part to AZURE', function describeF() {
|
||||||
});
|
});
|
||||||
});
|
});
|
||||||
describe('Basic test: ', () => {
|
describe('Basic test: ', () => {
|
||||||
beforeEach(function beF(done) {
|
let testContext;
|
||||||
this.currentTest.keyNameNormalAzure =
|
|
||||||
|
beforeEach(() => {
|
||||||
|
testContext = {};
|
||||||
|
});
|
||||||
|
|
||||||
|
beforeEach(done => {
|
||||||
|
testContext.currentTest.keyNameNormalAzure =
|
||||||
`normalazure${uniqName(keyObjectAzure)}`;
|
`normalazure${uniqName(keyObjectAzure)}`;
|
||||||
this.currentTest.keyNameNormalAzureMismatch =
|
testContext.currentTest.keyNameNormalAzureMismatch =
|
||||||
`normalazuremismatch${uniqName(keyObjectAzure)}`;
|
`normalazuremismatch${uniqName(keyObjectAzure)}`;
|
||||||
|
|
||||||
this.currentTest.keyNameFiveMbAzure =
|
testContext.currentTest.keyNameFiveMbAzure =
|
||||||
`fivembazure${uniqName(keyObjectAzure)}`;
|
`fivembazure${uniqName(keyObjectAzure)}`;
|
||||||
this.currentTest.keyNameFiveMbMem =
|
testContext.currentTest.keyNameFiveMbMem =
|
||||||
`fivembmem${uniqName(keyObjectMemory)}`;
|
`fivembmem${uniqName(keyObjectMemory)}`;
|
||||||
|
|
||||||
this.currentTest.mpuKeyNameAzure =
|
testContext.currentTest.mpuKeyNameAzure =
|
||||||
`mpukeyname${uniqName(keyObjectAzure)}`;
|
`mpukeyname${uniqName(keyObjectAzure)}`;
|
||||||
this.currentTest.mpuKeyNameMem =
|
testContext.currentTest.mpuKeyNameMem =
|
||||||
`mpukeyname${uniqName(keyObjectMemory)}`;
|
`mpukeyname${uniqName(keyObjectMemory)}`;
|
||||||
this.currentTest.mpuKeyNameAWS =
|
testContext.currentTest.mpuKeyNameAWS =
|
||||||
`mpukeyname${uniqName(keyObjectAWS)}`;
|
`mpukeyname${uniqName(keyObjectAWS)}`;
|
||||||
const paramsAzure = {
|
const paramsAzure = {
|
||||||
Bucket: azureContainerName,
|
Bucket: azureContainerName,
|
||||||
Key: this.currentTest.mpuKeyNameAzure,
|
Key: testContext.currentTest.mpuKeyNameAzure,
|
||||||
Metadata: { 'scal-location-constraint': azureLocation },
|
Metadata: { 'scal-location-constraint': azureLocation },
|
||||||
};
|
};
|
||||||
const paramsMem = {
|
const paramsMem = {
|
||||||
Bucket: memBucketName,
|
Bucket: memBucketName,
|
||||||
Key: this.currentTest.mpuKeyNameMem,
|
Key: testContext.currentTest.mpuKeyNameMem,
|
||||||
Metadata: { 'scal-location-constraint': memLocation },
|
Metadata: { 'scal-location-constraint': memLocation },
|
||||||
};
|
};
|
||||||
const paramsAWS = {
|
const paramsAWS = {
|
||||||
Bucket: memBucketName,
|
Bucket: memBucketName,
|
||||||
Key: this.currentTest.mpuKeyNameAWS,
|
Key: testContext.currentTest.mpuKeyNameAWS,
|
||||||
Metadata: { 'scal-location-constraint': awsLocation },
|
Metadata: { 'scal-location-constraint': awsLocation },
|
||||||
};
|
};
|
||||||
async.waterfall([
|
async.waterfall([
|
||||||
|
@ -176,67 +179,64 @@ describeSkipIfNotMultipleOrCeph('Put Copy Part to AZURE', function describeF() {
|
||||||
err => next(err)),
|
err => next(err)),
|
||||||
next => s3.putObject({
|
next => s3.putObject({
|
||||||
Bucket: azureContainerName,
|
Bucket: azureContainerName,
|
||||||
Key: this.currentTest.keyNameNormalAzure,
|
Key: testContext.currentTest.keyNameNormalAzure,
|
||||||
Body: normalBody,
|
Body: normalBody,
|
||||||
Metadata: { 'scal-location-constraint': azureLocation },
|
Metadata: { 'scal-location-constraint': azureLocation },
|
||||||
}, err => next(err)),
|
}, err => next(err)),
|
||||||
next => s3.putObject({
|
next => s3.putObject({
|
||||||
Bucket: azureContainerName,
|
Bucket: azureContainerName,
|
||||||
Key: this.currentTest.keyNameNormalAzureMismatch,
|
Key: testContext.currentTest.keyNameNormalAzureMismatch,
|
||||||
Body: normalBody,
|
Body: normalBody,
|
||||||
Metadata: { 'scal-location-constraint':
|
Metadata: { 'scal-location-constraint':
|
||||||
azureLocationMismatch },
|
azureLocationMismatch },
|
||||||
}, err => next(err)),
|
}, err => next(err)),
|
||||||
next => s3.putObject({
|
next => s3.putObject({
|
||||||
Bucket: azureContainerName,
|
Bucket: azureContainerName,
|
||||||
Key: this.currentTest.keyNameFiveMbAzure,
|
Key: testContext.currentTest.keyNameFiveMbAzure,
|
||||||
Body: fiveMbBody,
|
Body: fiveMbBody,
|
||||||
Metadata: { 'scal-location-constraint': azureLocation },
|
Metadata: { 'scal-location-constraint': azureLocation },
|
||||||
}, err => next(err)),
|
}, err => next(err)),
|
||||||
next => s3.putObject({
|
next => s3.putObject({
|
||||||
Bucket: azureContainerName,
|
Bucket: azureContainerName,
|
||||||
Key: this.currentTest.keyNameFiveMbMem,
|
Key: testContext.currentTest.keyNameFiveMbMem,
|
||||||
Body: fiveMbBody,
|
Body: fiveMbBody,
|
||||||
Metadata: { 'scal-location-constraint': memLocation },
|
Metadata: { 'scal-location-constraint': memLocation },
|
||||||
}, err => next(err)),
|
}, err => next(err)),
|
||||||
next => s3.createMultipartUpload(paramsAzure,
|
next => s3.createMultipartUpload(paramsAzure,
|
||||||
(err, res) => {
|
(err, res) => {
|
||||||
assert.equal(err, null, 'createMultipartUpload ' +
|
expect(err).toEqual(null);
|
||||||
`on Azure: Expected success, got error: ${err}`);
|
testContext.currentTest.uploadId = res.UploadId;
|
||||||
this.currentTest.uploadId = res.UploadId;
|
|
||||||
next();
|
next();
|
||||||
}),
|
}),
|
||||||
next => s3.createMultipartUpload(paramsMem,
|
next => s3.createMultipartUpload(paramsMem,
|
||||||
(err, res) => {
|
(err, res) => {
|
||||||
assert.equal(err, null, 'createMultipartUpload ' +
|
expect(err).toEqual(null);
|
||||||
`in memory: Expected success, got error: ${err}`);
|
testContext.currentTest.uploadIdMem = res.UploadId;
|
||||||
this.currentTest.uploadIdMem = res.UploadId;
|
|
||||||
next();
|
next();
|
||||||
}),
|
}),
|
||||||
next => s3.createMultipartUpload(paramsAWS,
|
next => s3.createMultipartUpload(paramsAWS,
|
||||||
(err, res) => {
|
(err, res) => {
|
||||||
assert.equal(err, null, 'createMultipartUpload ' +
|
expect(err).toEqual(null);
|
||||||
`on AWS: Expected success, got error: ${err}`);
|
testContext.currentTest.uploadIdAWS = res.UploadId;
|
||||||
this.currentTest.uploadIdAWS = res.UploadId;
|
|
||||||
next();
|
next();
|
||||||
}),
|
}),
|
||||||
], done);
|
], done);
|
||||||
});
|
});
|
||||||
afterEach(function afterEachF(done) {
|
afterEach(done => {
|
||||||
const paramsAzure = {
|
const paramsAzure = {
|
||||||
Bucket: azureContainerName,
|
Bucket: azureContainerName,
|
||||||
Key: this.currentTest.mpuKeyNameAzure,
|
Key: testContext.currentTest.mpuKeyNameAzure,
|
||||||
UploadId: this.currentTest.uploadId,
|
UploadId: testContext.currentTest.uploadId,
|
||||||
};
|
};
|
||||||
const paramsMem = {
|
const paramsMem = {
|
||||||
Bucket: memBucketName,
|
Bucket: memBucketName,
|
||||||
Key: this.currentTest.mpuKeyNameMem,
|
Key: testContext.currentTest.mpuKeyNameMem,
|
||||||
UploadId: this.currentTest.uploadIdMem,
|
UploadId: testContext.currentTest.uploadIdMem,
|
||||||
};
|
};
|
||||||
const paramsAWS = {
|
const paramsAWS = {
|
||||||
Bucket: memBucketName,
|
Bucket: memBucketName,
|
||||||
Key: this.currentTest.mpuKeyNameAWS,
|
Key: testContext.currentTest.mpuKeyNameAWS,
|
||||||
UploadId: this.currentTest.uploadIdAWS,
|
UploadId: testContext.currentTest.uploadIdAWS,
|
||||||
};
|
};
|
||||||
async.waterfall([
|
async.waterfall([
|
||||||
next => s3.abortMultipartUpload(paramsAzure,
|
next => s3.abortMultipartUpload(paramsAzure,
|
||||||
|
@ -247,60 +247,59 @@ describeSkipIfNotMultipleOrCeph('Put Copy Part to AZURE', function describeF() {
|
||||||
err => next(err)),
|
err => next(err)),
|
||||||
], done);
|
], done);
|
||||||
});
|
});
|
||||||
it('should copy small part from Azure to MPU with Azure location',
|
test(
|
||||||
function ifF(done) {
|
'should copy small part from Azure to MPU with Azure location',
|
||||||
|
done => {
|
||||||
const params = {
|
const params = {
|
||||||
Bucket: azureContainerName,
|
Bucket: azureContainerName,
|
||||||
CopySource:
|
CopySource:
|
||||||
`${azureContainerName}/${this.test.keyNameNormalAzure}`,
|
`${azureContainerName}/${testContext.test.keyNameNormalAzure}`,
|
||||||
Key: this.test.mpuKeyNameAzure,
|
Key: testContext.test.mpuKeyNameAzure,
|
||||||
PartNumber: 1,
|
PartNumber: 1,
|
||||||
UploadId: this.test.uploadId,
|
UploadId: testContext.test.uploadId,
|
||||||
};
|
};
|
||||||
async.waterfall([
|
async.waterfall([
|
||||||
next => s3.uploadPartCopy(params, (err, res) => {
|
next => s3.uploadPartCopy(params, (err, res) => {
|
||||||
assert.equal(err, null, 'uploadPartCopy: Expected ' +
|
expect(err).toEqual(null);
|
||||||
`success, got error: ${err}`);
|
expect(res.ETag).toBe(`"${normalMD5}"`);
|
||||||
assert.strictEqual(res.ETag, `"${normalMD5}"`);
|
|
||||||
next(err);
|
next(err);
|
||||||
}),
|
}),
|
||||||
next => {
|
next => {
|
||||||
const infos = {
|
const infos = {
|
||||||
azureContainerName,
|
azureContainerName,
|
||||||
mpuKeyNameAzure: this.test.mpuKeyNameAzure,
|
mpuKeyNameAzure: testContext.test.mpuKeyNameAzure,
|
||||||
uploadId: this.test.uploadId,
|
uploadId: testContext.test.uploadId,
|
||||||
md5: normalMD5,
|
md5: normalMD5,
|
||||||
subPartSize: [normalBodySize],
|
subPartSize: [normalBodySize],
|
||||||
};
|
};
|
||||||
assertCopyPart(infos, next);
|
assertCopyPart(infos, next);
|
||||||
},
|
},
|
||||||
], done);
|
], done);
|
||||||
});
|
}
|
||||||
|
);
|
||||||
|
|
||||||
it('should copy small part from Azure location with ' +
|
test('should copy small part from Azure location with ' +
|
||||||
'bucketMatch=false to MPU with Azure location',
|
'bucketMatch=false to MPU with Azure location', done => {
|
||||||
function ifF(done) {
|
|
||||||
const params = {
|
const params = {
|
||||||
Bucket: azureContainerName,
|
Bucket: azureContainerName,
|
||||||
CopySource:
|
CopySource:
|
||||||
`${azureContainerName}/` +
|
`${azureContainerName}/` +
|
||||||
`${this.test.keyNameNormalAzureMismatch}`,
|
`${testContext.test.keyNameNormalAzureMismatch}`,
|
||||||
Key: this.test.mpuKeyNameAzure,
|
Key: testContext.test.mpuKeyNameAzure,
|
||||||
PartNumber: 1,
|
PartNumber: 1,
|
||||||
UploadId: this.test.uploadId,
|
UploadId: testContext.test.uploadId,
|
||||||
};
|
};
|
||||||
async.waterfall([
|
async.waterfall([
|
||||||
next => s3.uploadPartCopy(params, (err, res) => {
|
next => s3.uploadPartCopy(params, (err, res) => {
|
||||||
assert.equal(err, null, 'uploadPartCopy: Expected ' +
|
expect(err).toEqual(null);
|
||||||
`success, got error: ${err}`);
|
expect(res.ETag).toBe(`"${normalMD5}"`);
|
||||||
assert.strictEqual(res.ETag, `"${normalMD5}"`);
|
|
||||||
next(err);
|
next(err);
|
||||||
}),
|
}),
|
||||||
next => {
|
next => {
|
||||||
const infos = {
|
const infos = {
|
||||||
azureContainerName,
|
azureContainerName,
|
||||||
mpuKeyNameAzure: this.test.mpuKeyNameAzure,
|
mpuKeyNameAzure: testContext.test.mpuKeyNameAzure,
|
||||||
uploadId: this.test.uploadId,
|
uploadId: testContext.test.uploadId,
|
||||||
md5: normalMD5,
|
md5: normalMD5,
|
||||||
subPartSize: [normalBodySize],
|
subPartSize: [normalBodySize],
|
||||||
};
|
};
|
||||||
|
@ -309,67 +308,66 @@ describeSkipIfNotMultipleOrCeph('Put Copy Part to AZURE', function describeF() {
|
||||||
], done);
|
], done);
|
||||||
});
|
});
|
||||||
|
|
||||||
it('should copy 5 Mb part from Azure to MPU with Azure location',
|
test(
|
||||||
function ifF(done) {
|
'should copy 5 Mb part from Azure to MPU with Azure location',
|
||||||
|
done => {
|
||||||
const params = {
|
const params = {
|
||||||
Bucket: azureContainerName,
|
Bucket: azureContainerName,
|
||||||
CopySource:
|
CopySource:
|
||||||
`${azureContainerName}/${this.test.keyNameFiveMbAzure}`,
|
`${azureContainerName}/${testContext.test.keyNameFiveMbAzure}`,
|
||||||
Key: this.test.mpuKeyNameAzure,
|
Key: testContext.test.mpuKeyNameAzure,
|
||||||
PartNumber: 1,
|
PartNumber: 1,
|
||||||
UploadId: this.test.uploadId,
|
UploadId: testContext.test.uploadId,
|
||||||
};
|
};
|
||||||
async.waterfall([
|
async.waterfall([
|
||||||
next => s3.uploadPartCopy(params, (err, res) => {
|
next => s3.uploadPartCopy(params, (err, res) => {
|
||||||
assert.equal(err, null, 'uploadPartCopy: Expected ' +
|
expect(err).toEqual(null);
|
||||||
`success, got error: ${err}`);
|
expect(res.ETag).toBe(`"${fiveMbMD5}"`);
|
||||||
assert.strictEqual(res.ETag, `"${fiveMbMD5}"`);
|
|
||||||
next(err);
|
next(err);
|
||||||
}),
|
}),
|
||||||
next => {
|
next => {
|
||||||
const infos = {
|
const infos = {
|
||||||
azureContainerName,
|
azureContainerName,
|
||||||
mpuKeyNameAzure: this.test.mpuKeyNameAzure,
|
mpuKeyNameAzure: testContext.test.mpuKeyNameAzure,
|
||||||
uploadId: this.test.uploadId,
|
uploadId: testContext.test.uploadId,
|
||||||
md5: fiveMbMD5,
|
md5: fiveMbMD5,
|
||||||
subPartSize: [fiveMB],
|
subPartSize: [fiveMB],
|
||||||
};
|
};
|
||||||
assertCopyPart(infos, next);
|
assertCopyPart(infos, next);
|
||||||
},
|
},
|
||||||
], done);
|
], done);
|
||||||
});
|
}
|
||||||
|
);
|
||||||
|
|
||||||
it('should copy part from Azure to MPU with memory location',
|
test(
|
||||||
function ifF(done) {
|
'should copy part from Azure to MPU with memory location',
|
||||||
|
done => {
|
||||||
const params = {
|
const params = {
|
||||||
Bucket: memBucketName,
|
Bucket: memBucketName,
|
||||||
CopySource:
|
CopySource:
|
||||||
`${azureContainerName}/${this.test.keyNameNormalAzure}`,
|
`${azureContainerName}/${testContext.test.keyNameNormalAzure}`,
|
||||||
Key: this.test.mpuKeyNameMem,
|
Key: testContext.test.mpuKeyNameMem,
|
||||||
PartNumber: 1,
|
PartNumber: 1,
|
||||||
UploadId: this.test.uploadIdMem,
|
UploadId: testContext.test.uploadIdMem,
|
||||||
};
|
};
|
||||||
async.waterfall([
|
async.waterfall([
|
||||||
next => s3.uploadPartCopy(params, (err, res) => {
|
next => s3.uploadPartCopy(params, (err, res) => {
|
||||||
assert.equal(err, null, 'uploadPartCopy: Expected ' +
|
expect(err).toEqual(null);
|
||||||
`success, got error: ${err}`);
|
expect(res.ETag).toBe(`"${normalMD5}"`);
|
||||||
assert.strictEqual(res.ETag, `"${normalMD5}"`);
|
|
||||||
next(err);
|
next(err);
|
||||||
}),
|
}),
|
||||||
next => {
|
next => {
|
||||||
s3.listParts({
|
s3.listParts({
|
||||||
Bucket: memBucketName,
|
Bucket: memBucketName,
|
||||||
Key: this.test.mpuKeyNameMem,
|
Key: testContext.test.mpuKeyNameMem,
|
||||||
UploadId: this.test.uploadIdMem,
|
UploadId: testContext.test.uploadIdMem,
|
||||||
}, (err, res) => {
|
}, (err, res) => {
|
||||||
assert.equal(err, null,
|
expect(err).toEqual(null);
|
||||||
'listParts: Expected success,' +
|
|
||||||
` got error: ${err}`);
|
|
||||||
const resultCopy =
|
const resultCopy =
|
||||||
JSON.parse(JSON.stringify(result));
|
JSON.parse(JSON.stringify(result));
|
||||||
resultCopy.Bucket = memBucketName;
|
resultCopy.Bucket = memBucketName;
|
||||||
resultCopy.Key = this.test.mpuKeyNameMem;
|
resultCopy.Key = testContext.test.mpuKeyNameMem;
|
||||||
resultCopy.UploadId = this.test.uploadIdMem;
|
resultCopy.UploadId = testContext.test.uploadIdMem;
|
||||||
resultCopy.Parts =
|
resultCopy.Parts =
|
||||||
[{ PartNumber: 1,
|
[{ PartNumber: 1,
|
||||||
LastModified: res.Parts[0].LastModified,
|
LastModified: res.Parts[0].LastModified,
|
||||||
|
@ -380,23 +378,24 @@ describeSkipIfNotMultipleOrCeph('Put Copy Part to AZURE', function describeF() {
|
||||||
});
|
});
|
||||||
},
|
},
|
||||||
], done);
|
], done);
|
||||||
});
|
}
|
||||||
|
);
|
||||||
|
|
||||||
it('should copy part from Azure to MPU with AWS location',
|
test(
|
||||||
function ifF(done) {
|
'should copy part from Azure to MPU with AWS location',
|
||||||
|
done => {
|
||||||
const params = {
|
const params = {
|
||||||
Bucket: memBucketName,
|
Bucket: memBucketName,
|
||||||
CopySource:
|
CopySource:
|
||||||
`${azureContainerName}/${this.test.keyNameNormalAzure}`,
|
`${azureContainerName}/${testContext.test.keyNameNormalAzure}`,
|
||||||
Key: this.test.mpuKeyNameAWS,
|
Key: testContext.test.mpuKeyNameAWS,
|
||||||
PartNumber: 1,
|
PartNumber: 1,
|
||||||
UploadId: this.test.uploadIdAWS,
|
UploadId: testContext.test.uploadIdAWS,
|
||||||
};
|
};
|
||||||
async.waterfall([
|
async.waterfall([
|
||||||
next => s3.uploadPartCopy(params, (err, res) => {
|
next => s3.uploadPartCopy(params, (err, res) => {
|
||||||
assert.equal(err, null, 'uploadPartCopy: Expected ' +
|
expect(err).toEqual(null);
|
||||||
`success, got error: ${err}`);
|
expect(res.ETag).toBe(`"${normalMD5}"`);
|
||||||
assert.strictEqual(res.ETag, `"${normalMD5}"`);
|
|
||||||
next(err);
|
next(err);
|
||||||
}),
|
}),
|
||||||
next => {
|
next => {
|
||||||
|
@ -405,45 +404,39 @@ describeSkipIfNotMultipleOrCeph('Put Copy Part to AZURE', function describeF() {
|
||||||
.details.bucketName;
|
.details.bucketName;
|
||||||
awsS3.listParts({
|
awsS3.listParts({
|
||||||
Bucket: awsBucket,
|
Bucket: awsBucket,
|
||||||
Key: this.test.mpuKeyNameAWS,
|
Key: testContext.test.mpuKeyNameAWS,
|
||||||
UploadId: this.test.uploadIdAWS,
|
UploadId: testContext.test.uploadIdAWS,
|
||||||
}, (err, res) => {
|
}, (err, res) => {
|
||||||
assert.equal(err, null,
|
expect(err).toEqual(null);
|
||||||
'listParts: Expected success,' +
|
expect(res.Bucket).toBe(awsBucket);
|
||||||
` got error: ${err}`);
|
expect(res.Key).toBe(testContext.test.mpuKeyNameAWS);
|
||||||
assert.strictEqual(res.Bucket, awsBucket);
|
expect(res.UploadId).toBe(testContext.test.uploadIdAWS);
|
||||||
assert.strictEqual(res.Key,
|
expect(res.Parts.length).toBe(1);
|
||||||
this.test.mpuKeyNameAWS);
|
expect(res.Parts[0].PartNumber).toBe(1);
|
||||||
assert.strictEqual(res.UploadId,
|
expect(res.Parts[0].ETag).toBe(`"${normalMD5}"`);
|
||||||
this.test.uploadIdAWS);
|
expect(res.Parts[0].Size).toBe(normalBodySize);
|
||||||
assert.strictEqual(res.Parts.length, 1);
|
|
||||||
assert.strictEqual(res.Parts[0].PartNumber, 1);
|
|
||||||
assert.strictEqual(res.Parts[0].ETag,
|
|
||||||
`"${normalMD5}"`);
|
|
||||||
assert.strictEqual(res.Parts[0].Size,
|
|
||||||
normalBodySize);
|
|
||||||
next();
|
next();
|
||||||
});
|
});
|
||||||
},
|
},
|
||||||
], done);
|
], done);
|
||||||
});
|
}
|
||||||
|
);
|
||||||
|
|
||||||
it('should copy part from Azure object with range to MPU ' +
|
test('should copy part from Azure object with range to MPU ' +
|
||||||
'with AWS location', function ifF(done) {
|
'with AWS location', done => {
|
||||||
const params = {
|
const params = {
|
||||||
Bucket: memBucketName,
|
Bucket: memBucketName,
|
||||||
CopySource:
|
CopySource:
|
||||||
`${azureContainerName}/${this.test.keyNameNormalAzure}`,
|
`${azureContainerName}/${testContext.test.keyNameNormalAzure}`,
|
||||||
Key: this.test.mpuKeyNameAWS,
|
Key: testContext.test.mpuKeyNameAWS,
|
||||||
CopySourceRange: 'bytes=0-5',
|
CopySourceRange: 'bytes=0-5',
|
||||||
PartNumber: 1,
|
PartNumber: 1,
|
||||||
UploadId: this.test.uploadIdAWS,
|
UploadId: testContext.test.uploadIdAWS,
|
||||||
};
|
};
|
||||||
async.waterfall([
|
async.waterfall([
|
||||||
next => s3.uploadPartCopy(params, (err, res) => {
|
next => s3.uploadPartCopy(params, (err, res) => {
|
||||||
assert.equal(err, null, 'uploadPartCopy: Expected ' +
|
expect(err).toEqual(null);
|
||||||
`success, got error: ${err}`);
|
expect(res.ETag).toBe(`"${sixBytesMD5}"`);
|
||||||
assert.strictEqual(res.ETag, `"${sixBytesMD5}"`);
|
|
||||||
next(err);
|
next(err);
|
||||||
}),
|
}),
|
||||||
next => {
|
next => {
|
||||||
|
@ -452,51 +445,44 @@ describeSkipIfNotMultipleOrCeph('Put Copy Part to AZURE', function describeF() {
|
||||||
.details.bucketName;
|
.details.bucketName;
|
||||||
awsS3.listParts({
|
awsS3.listParts({
|
||||||
Bucket: awsBucket,
|
Bucket: awsBucket,
|
||||||
Key: this.test.mpuKeyNameAWS,
|
Key: testContext.test.mpuKeyNameAWS,
|
||||||
UploadId: this.test.uploadIdAWS,
|
UploadId: testContext.test.uploadIdAWS,
|
||||||
}, (err, res) => {
|
}, (err, res) => {
|
||||||
assert.equal(err, null,
|
expect(err).toEqual(null);
|
||||||
'listParts: Expected success,' +
|
expect(res.Bucket).toBe(awsBucket);
|
||||||
` got error: ${err}`);
|
expect(res.Key).toBe(testContext.test.mpuKeyNameAWS);
|
||||||
assert.strictEqual(res.Bucket, awsBucket);
|
expect(res.UploadId).toBe(testContext.test.uploadIdAWS);
|
||||||
assert.strictEqual(res.Key,
|
expect(res.Parts.length).toBe(1);
|
||||||
this.test.mpuKeyNameAWS);
|
expect(res.Parts[0].PartNumber).toBe(1);
|
||||||
assert.strictEqual(res.UploadId,
|
expect(res.Parts[0].ETag).toBe(`"${sixBytesMD5}"`);
|
||||||
this.test.uploadIdAWS);
|
expect(res.Parts[0].Size).toBe(6);
|
||||||
assert.strictEqual(res.Parts.length, 1);
|
|
||||||
assert.strictEqual(res.Parts[0].PartNumber, 1);
|
|
||||||
assert.strictEqual(res.Parts[0].ETag,
|
|
||||||
`"${sixBytesMD5}"`);
|
|
||||||
assert.strictEqual(res.Parts[0].Size, 6);
|
|
||||||
next();
|
next();
|
||||||
});
|
});
|
||||||
},
|
},
|
||||||
], done);
|
], done);
|
||||||
});
|
});
|
||||||
|
|
||||||
it('should copy 5 Mb part from a memory location to MPU with ' +
|
test('should copy 5 Mb part from a memory location to MPU with ' +
|
||||||
'Azure location',
|
'Azure location', done => {
|
||||||
function ifF(done) {
|
|
||||||
const params = {
|
const params = {
|
||||||
Bucket: azureContainerName,
|
Bucket: azureContainerName,
|
||||||
CopySource:
|
CopySource:
|
||||||
`${azureContainerName}/${this.test.keyNameFiveMbMem}`,
|
`${azureContainerName}/${testContext.test.keyNameFiveMbMem}`,
|
||||||
Key: this.test.mpuKeyNameAzure,
|
Key: testContext.test.mpuKeyNameAzure,
|
||||||
PartNumber: 1,
|
PartNumber: 1,
|
||||||
UploadId: this.test.uploadId,
|
UploadId: testContext.test.uploadId,
|
||||||
};
|
};
|
||||||
async.waterfall([
|
async.waterfall([
|
||||||
next => s3.uploadPartCopy(params, (err, res) => {
|
next => s3.uploadPartCopy(params, (err, res) => {
|
||||||
assert.equal(err, null, 'uploadPartCopy: Expected ' +
|
expect(err).toEqual(null);
|
||||||
`success, got error: ${err}`);
|
expect(res.ETag).toBe(`"${fiveMbMD5}"`);
|
||||||
assert.strictEqual(res.ETag, `"${fiveMbMD5}"`);
|
|
||||||
next(err);
|
next(err);
|
||||||
}),
|
}),
|
||||||
next => {
|
next => {
|
||||||
const infos = {
|
const infos = {
|
||||||
azureContainerName,
|
azureContainerName,
|
||||||
mpuKeyNameAzure: this.test.mpuKeyNameAzure,
|
mpuKeyNameAzure: testContext.test.mpuKeyNameAzure,
|
||||||
uploadId: this.test.uploadId,
|
uploadId: testContext.test.uploadId,
|
||||||
md5: fiveMbMD5,
|
md5: fiveMbMD5,
|
||||||
subPartSize: [fiveMB],
|
subPartSize: [fiveMB],
|
||||||
};
|
};
|
||||||
|
@ -506,45 +492,42 @@ describeSkipIfNotMultipleOrCeph('Put Copy Part to AZURE', function describeF() {
|
||||||
});
|
});
|
||||||
|
|
||||||
describe('with existing part', () => {
|
describe('with existing part', () => {
|
||||||
beforeEach(function beF(done) {
|
beforeEach(done => {
|
||||||
const params = {
|
const params = {
|
||||||
Body: oneKbBody,
|
Body: oneKbBody,
|
||||||
Bucket: azureContainerName,
|
Bucket: azureContainerName,
|
||||||
Key: this.currentTest.mpuKeyNameAzure,
|
Key: testContext.currentTest.mpuKeyNameAzure,
|
||||||
PartNumber: 1,
|
PartNumber: 1,
|
||||||
UploadId: this.currentTest.uploadId,
|
UploadId: testContext.currentTest.uploadId,
|
||||||
};
|
};
|
||||||
s3.uploadPart(params, done);
|
s3.uploadPart(params, done);
|
||||||
});
|
});
|
||||||
it('should copy part from Azure to Azure with existing ' +
|
test('should copy part from Azure to Azure with existing ' +
|
||||||
'parts', function ifF(done) {
|
'parts', done => {
|
||||||
const resultCopy = JSON.parse(JSON.stringify(result));
|
const resultCopy = JSON.parse(JSON.stringify(result));
|
||||||
const params = {
|
const params = {
|
||||||
Bucket: azureContainerName,
|
Bucket: azureContainerName,
|
||||||
CopySource:
|
CopySource:
|
||||||
`${azureContainerName}/${this.test.keyNameNormalAzure}`,
|
`${azureContainerName}/${testContext.test.keyNameNormalAzure}`,
|
||||||
Key: this.test.mpuKeyNameAzure,
|
Key: testContext.test.mpuKeyNameAzure,
|
||||||
PartNumber: 2,
|
PartNumber: 2,
|
||||||
UploadId: this.test.uploadId,
|
UploadId: testContext.test.uploadId,
|
||||||
};
|
};
|
||||||
async.waterfall([
|
async.waterfall([
|
||||||
next => s3.uploadPartCopy(params, (err, res) => {
|
next => s3.uploadPartCopy(params, (err, res) => {
|
||||||
assert.equal(err, null,
|
expect(err).toEqual(null);
|
||||||
'uploadPartCopy: Expected success, got ' +
|
expect(res.ETag).toBe(`"${normalMD5}"`);
|
||||||
`error: ${err}`);
|
|
||||||
assert.strictEqual(res.ETag, `"${normalMD5}"`);
|
|
||||||
next(err);
|
next(err);
|
||||||
}),
|
}),
|
||||||
next => s3.listParts({
|
next => s3.listParts({
|
||||||
Bucket: azureContainerName,
|
Bucket: azureContainerName,
|
||||||
Key: this.test.mpuKeyNameAzure,
|
Key: testContext.test.mpuKeyNameAzure,
|
||||||
UploadId: this.test.uploadId,
|
UploadId: testContext.test.uploadId,
|
||||||
}, (err, res) => {
|
}, (err, res) => {
|
||||||
assert.equal(err, null, 'listParts: Expected ' +
|
expect(err).toEqual(null);
|
||||||
`success, got error: ${err}`);
|
|
||||||
resultCopy.Bucket = azureContainerName;
|
resultCopy.Bucket = azureContainerName;
|
||||||
resultCopy.Key = this.test.mpuKeyNameAzure;
|
resultCopy.Key = testContext.test.mpuKeyNameAzure;
|
||||||
resultCopy.UploadId = this.test.uploadId;
|
resultCopy.UploadId = testContext.test.uploadId;
|
||||||
resultCopy.Parts =
|
resultCopy.Parts =
|
||||||
[{ PartNumber: 1,
|
[{ PartNumber: 1,
|
||||||
LastModified: res.Parts[0].LastModified,
|
LastModified: res.Parts[0].LastModified,
|
||||||
|
@ -559,21 +542,16 @@ describeSkipIfNotMultipleOrCeph('Put Copy Part to AZURE', function describeF() {
|
||||||
next();
|
next();
|
||||||
}),
|
}),
|
||||||
next => azureClient.listBlocks(azureContainerName,
|
next => azureClient.listBlocks(azureContainerName,
|
||||||
this.test.mpuKeyNameAzure, 'all', (err, res) => {
|
testContext.test.mpuKeyNameAzure, 'all', (err, res) => {
|
||||||
assert.equal(err, null, 'listBlocks: Expected ' +
|
expect(err).toEqual(null);
|
||||||
`success, got error: ${err}`);
|
|
||||||
const partName = azureMpuUtils.getBlockId(
|
const partName = azureMpuUtils.getBlockId(
|
||||||
this.test.uploadId, 1, 0);
|
testContext.test.uploadId, 1, 0);
|
||||||
const partName2 = azureMpuUtils.getBlockId(
|
const partName2 = azureMpuUtils.getBlockId(
|
||||||
this.test.uploadId, 2, 0);
|
testContext.test.uploadId, 2, 0);
|
||||||
assert.strictEqual(res.UncommittedBlocks[0].Name,
|
expect(res.UncommittedBlocks[0].Name).toBe(partName);
|
||||||
partName);
|
expect(res.UncommittedBlocks[0].Size).toEqual(oneKb);
|
||||||
assert.equal(res.UncommittedBlocks[0].Size,
|
expect(res.UncommittedBlocks[1].Name).toBe(partName2);
|
||||||
oneKb);
|
expect(res.UncommittedBlocks[1].Size).toEqual(11);
|
||||||
assert.strictEqual(res.UncommittedBlocks[1].Name,
|
|
||||||
partName2);
|
|
||||||
assert.equal(res.UncommittedBlocks[1].Size,
|
|
||||||
11);
|
|
||||||
next();
|
next();
|
||||||
}),
|
}),
|
||||||
], done);
|
], done);
|
||||||
|
@ -605,15 +583,21 @@ function describeF() {
|
||||||
});
|
});
|
||||||
});
|
});
|
||||||
describe('Basic test with large object: ', () => {
|
describe('Basic test with large object: ', () => {
|
||||||
beforeEach(function beF(done) {
|
let testContext;
|
||||||
this.currentTest.keyNameOneHundredAndFiveMbAzure =
|
|
||||||
|
beforeEach(() => {
|
||||||
|
testContext = {};
|
||||||
|
});
|
||||||
|
|
||||||
|
beforeEach(done => {
|
||||||
|
testContext.currentTest.keyNameOneHundredAndFiveMbAzure =
|
||||||
`onehundredandfivembazure${uniqName(keyObjectAzure)}`;
|
`onehundredandfivembazure${uniqName(keyObjectAzure)}`;
|
||||||
this.currentTest.mpuKeyNameAzure =
|
testContext.currentTest.mpuKeyNameAzure =
|
||||||
`mpukeyname${uniqName(keyObjectAzure)}`;
|
`mpukeyname${uniqName(keyObjectAzure)}`;
|
||||||
|
|
||||||
const params = {
|
const params = {
|
||||||
Bucket: azureContainerName,
|
Bucket: azureContainerName,
|
||||||
Key: this.currentTest.mpuKeyNameAzure,
|
Key: testContext.currentTest.mpuKeyNameAzure,
|
||||||
Metadata: { 'scal-location-constraint': azureLocation },
|
Metadata: { 'scal-location-constraint': azureLocation },
|
||||||
};
|
};
|
||||||
async.waterfall([
|
async.waterfall([
|
||||||
|
@ -621,52 +605,49 @@ function describeF() {
|
||||||
err => next(err)),
|
err => next(err)),
|
||||||
next => s3.putObject({
|
next => s3.putObject({
|
||||||
Bucket: azureContainerName,
|
Bucket: azureContainerName,
|
||||||
Key: this.currentTest.keyNameOneHundredAndFiveMbAzure,
|
Key: testContext.currentTest.keyNameOneHundredAndFiveMbAzure,
|
||||||
Body: oneHundredAndFiveMbBody,
|
Body: oneHundredAndFiveMbBody,
|
||||||
Metadata: { 'scal-location-constraint': azureLocation },
|
Metadata: { 'scal-location-constraint': azureLocation },
|
||||||
}, err => next(err)),
|
}, err => next(err)),
|
||||||
next => s3.createMultipartUpload(params, (err, res) => {
|
next => s3.createMultipartUpload(params, (err, res) => {
|
||||||
assert.equal(err, null, 'createMultipartUpload: ' +
|
expect(err).toEqual(null);
|
||||||
`Expected success, got error: ${err}`);
|
testContext.currentTest.uploadId = res.UploadId;
|
||||||
this.currentTest.uploadId = res.UploadId;
|
|
||||||
next();
|
next();
|
||||||
}),
|
}),
|
||||||
], done);
|
], done);
|
||||||
});
|
});
|
||||||
afterEach(function afterEachF(done) {
|
afterEach(done => {
|
||||||
const params = {
|
const params = {
|
||||||
Bucket: azureContainerName,
|
Bucket: azureContainerName,
|
||||||
Key: this.currentTest.mpuKeyNameAzure,
|
Key: testContext.currentTest.mpuKeyNameAzure,
|
||||||
UploadId: this.currentTest.uploadId,
|
UploadId: testContext.currentTest.uploadId,
|
||||||
};
|
};
|
||||||
s3.abortMultipartUpload(params, done);
|
s3.abortMultipartUpload(params, done);
|
||||||
});
|
});
|
||||||
|
|
||||||
it('should copy 105 MB part from Azure to MPU with Azure ' +
|
test('should copy 105 MB part from Azure to MPU with Azure ' +
|
||||||
'location', function ifF(done) {
|
'location', done => {
|
||||||
const params = {
|
const params = {
|
||||||
Bucket: azureContainerName,
|
Bucket: azureContainerName,
|
||||||
CopySource:
|
CopySource:
|
||||||
`${azureContainerName}/` +
|
`${azureContainerName}/` +
|
||||||
`${this.test.keyNameOneHundredAndFiveMbAzure}`,
|
`${testContext.test.keyNameOneHundredAndFiveMbAzure}`,
|
||||||
Key: this.test.mpuKeyNameAzure,
|
Key: testContext.test.mpuKeyNameAzure,
|
||||||
PartNumber: 1,
|
PartNumber: 1,
|
||||||
UploadId: this.test.uploadId,
|
UploadId: testContext.test.uploadId,
|
||||||
};
|
};
|
||||||
async.waterfall([
|
async.waterfall([
|
||||||
next => s3.uploadPartCopy(params, (err, res) => {
|
next => s3.uploadPartCopy(params, (err, res) => {
|
||||||
assert.equal(err, null, 'uploadPartCopy: Expected ' +
|
expect(err).toEqual(null);
|
||||||
`success, got error: ${err}`);
|
expect(res.ETag).toBe(`"${oneHundredAndFiveMbMD5}"`);
|
||||||
assert.strictEqual(res.ETag,
|
|
||||||
`"${oneHundredAndFiveMbMD5}"`);
|
|
||||||
next(err);
|
next(err);
|
||||||
}),
|
}),
|
||||||
next => {
|
next => {
|
||||||
const infos = {
|
const infos = {
|
||||||
azureContainerName,
|
azureContainerName,
|
||||||
mpuKeyNameAzure:
|
mpuKeyNameAzure:
|
||||||
this.test.mpuKeyNameAzure,
|
testContext.test.mpuKeyNameAzure,
|
||||||
uploadId: this.test.uploadId,
|
uploadId: testContext.test.uploadId,
|
||||||
md5: oneHundredAndFiveMbMD5,
|
md5: oneHundredAndFiveMbMD5,
|
||||||
subPartSize: [100 * 1024 * 1024, 5 * 1024 * 1024],
|
subPartSize: [100 * 1024 * 1024, 5 * 1024 * 1024],
|
||||||
};
|
};
|
||||||
|
@ -709,15 +690,21 @@ function describeF() {
|
||||||
});
|
});
|
||||||
describe('Basic test with complete MPU from AWS to Azure location: ',
|
describe('Basic test with complete MPU from AWS to Azure location: ',
|
||||||
() => {
|
() => {
|
||||||
beforeEach(function beF(done) {
|
let testContext;
|
||||||
this.currentTest.keyNameAws =
|
|
||||||
|
beforeEach(() => {
|
||||||
|
testContext = {};
|
||||||
|
});
|
||||||
|
|
||||||
|
beforeEach(done => {
|
||||||
|
testContext.currentTest.keyNameAws =
|
||||||
`onehundredandfivembazure${uniqName(keyObjectAWS)}`;
|
`onehundredandfivembazure${uniqName(keyObjectAWS)}`;
|
||||||
this.currentTest.mpuKeyNameAzure =
|
testContext.currentTest.mpuKeyNameAzure =
|
||||||
`mpukeyname${uniqName(keyObjectAzure)}`;
|
`mpukeyname${uniqName(keyObjectAzure)}`;
|
||||||
|
|
||||||
const createMpuParams = {
|
const createMpuParams = {
|
||||||
Bucket: azureContainerName,
|
Bucket: azureContainerName,
|
-Key: this.currentTest.mpuKeyNameAzure,
+Key: testContext.currentTest.mpuKeyNameAzure,
 Metadata: { 'scal-location-constraint': azureLocation },
 };
 async.waterfall([
@@ -727,57 +714,54 @@ function describeF() {
 err => next(err)),
 next => s3.putObject({
 Bucket: awsBucketName,
-Key: this.currentTest.keyNameAws,
+Key: testContext.currentTest.keyNameAws,
 Body: fiveMbBody,
 Metadata: { 'scal-location-constraint': awsLocation },
 }, err => next(err)),
 next => s3.createMultipartUpload(createMpuParams,
 (err, res) => {
-assert.equal(err, null, 'createMultipartUpload: ' +
-`Expected success, got error: ${err}`);
-this.currentTest.uploadId = res.UploadId;
+expect(err).toEqual(null);
+testContext.currentTest.uploadId = res.UploadId;
 next();
 }),
 ], done);
 });

-it('should copy two 5 MB part from Azure to MPU with Azure ' +
-'location', function ifF(done) {
+test('should copy two 5 MB part from Azure to MPU with Azure ' +
+'location', done => {
 const uploadParams = {
 Bucket: azureContainerName,
 CopySource:
 `${awsBucketName}/` +
-`${this.test.keyNameAws}`,
-Key: this.test.mpuKeyNameAzure,
+`${testContext.test.keyNameAws}`,
+Key: testContext.test.mpuKeyNameAzure,
 PartNumber: 1,
-UploadId: this.test.uploadId,
+UploadId: testContext.test.uploadId,
 };
 const uploadParams2 = {
 Bucket: azureContainerName,
 CopySource:
 `${awsBucketName}/` +
-`${this.test.keyNameAws}`,
-Key: this.test.mpuKeyNameAzure,
+`${testContext.test.keyNameAws}`,
+Key: testContext.test.mpuKeyNameAzure,
 PartNumber: 2,
-UploadId: this.test.uploadId,
+UploadId: testContext.test.uploadId,
 };
 async.waterfall([
 next => s3.uploadPartCopy(uploadParams, (err, res) => {
-assert.equal(err, null, 'uploadPartCopy: Expected ' +
-`success, got error: ${err}`);
-assert.strictEqual(res.ETag, `"${fiveMbMD5}"`);
+expect(err).toEqual(null);
+expect(res.ETag).toBe(`"${fiveMbMD5}"`);
 next(err);
 }),
 next => s3.uploadPartCopy(uploadParams2, (err, res) => {
-assert.equal(err, null, 'uploadPartCopy: Expected ' +
-`success, got error: ${err}`);
-assert.strictEqual(res.ETag, `"${fiveMbMD5}"`);
+expect(err).toEqual(null);
+expect(res.ETag).toBe(`"${fiveMbMD5}"`);
 next(err);
 }),
 next => {
 const completeMpuParams = {
 Bucket: azureContainerName,
-Key: this.test.mpuKeyNameAzure,
+Key: testContext.test.mpuKeyNameAzure,
 MultipartUpload: {
 Parts: [
 {
@@ -790,15 +774,13 @@ function describeF() {
 },
 ],
 },
-UploadId: this.test.uploadId,
+UploadId: testContext.test.uploadId,
 };
 s3.completeMultipartUpload(completeMpuParams,
 (err, res) => {
-assert.equal(err, null, 'completeMultipartUpload:' +
-` Expected success, got error: ${err}`);
-assert.strictEqual(res.Bucket, azureContainerName);
-assert.strictEqual(res.Key,
-this.test.mpuKeyNameAzure);
+expect(err).toEqual(null);
+expect(res.Bucket).toBe(azureContainerName);
+expect(res.Key).toBe(testContext.test.mpuKeyNameAzure);
 next();
 });
 },
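Note: throughout this migration, Node `assert` calls map onto Jest matchers, dropping the custom failure messages (Jest prints its own diff on failure). A minimal sketch of the mapping, using hypothetical fixture values rather than the suite's real responses:

    // Inside a Jest test; `expect` is a Jest global.
    const err = null;
    const res = { ETag: '"d41d8cd98f00b204e9800998ecf8427e"' }; // assumed fixture

    // assert.equal(err, null)                -> loose equality
    expect(err).toEqual(null);
    // assert.strictEqual(res.ETag, expected) -> strict equality
    expect(res.ETag).toBe('"d41d8cd98f00b204e9800998ecf8427e"');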
@@ -81,7 +81,7 @@ function assertCopyPart(infos, cb) {
 }, (err, res) => {
 assert.ifError(err, 'GCP listParts: Expected success,' +
 `got error: ${err}`);
-assert.strictEqual(res.Contents[0].ETag, `"${md5}"`);
+expect(res.Contents[0].ETag).toBe(`"${md5}"`);
 next();
 }),
 ], cb);
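Note: the GCP client's listParts helper reports parts under `res.Contents` (an object-listing shape) rather than an S3-style `Parts` array, which is why the assertion above checks `Contents[0].ETag`. A hedged sketch of that check, with a placeholder digest:

    // Callback shape as these tests read it (assumed):
    const res = { Contents: [{ ETag: '"<md5-of-part-1>"' }] };
    expect(res.Contents[0].ETag).toBe('"<md5-of-part-1>"');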
@@ -119,36 +119,42 @@ describeSkipIfNotMultipleOrCeph('Put Copy Part to GCP', function describeFn() {
 });

 describe('Basic test: ', () => {
-beforeEach(function beforeFn(done) {
-this.currentTest.keyNameNormalGcp =
+let testContext;
+
+beforeEach(() => {
+testContext = {};
+});
+
+beforeEach(done => {
+testContext.currentTest.keyNameNormalGcp =
 `normalgcp${uniqName(keyObjectGcp)}`;
-this.currentTest.keyNameNormalGcpMismatch =
+testContext.currentTest.keyNameNormalGcpMismatch =
 `normalgcpmismatch${uniqName(keyObjectGcp)}`;

-this.currentTest.keyNameFiveMbGcp =
+testContext.currentTest.keyNameFiveMbGcp =
 `fivembgcp${uniqName(keyObjectGcp)}`;
-this.currentTest.keyNameFiveMbMem =
+testContext.currentTest.keyNameFiveMbMem =
 `fivembmem${uniqName(keyObjectMemory)}`;

-this.currentTest.mpuKeyNameGcp =
+testContext.currentTest.mpuKeyNameGcp =
 `mpukeyname${uniqName(keyObjectGcp)}`;
-this.currentTest.mpuKeyNameMem =
+testContext.currentTest.mpuKeyNameMem =
 `mpukeyname${uniqName(keyObjectMemory)}`;
-this.currentTest.mpuKeyNameAWS =
+testContext.currentTest.mpuKeyNameAWS =
 `mpukeyname${uniqName(keyObjectAWS)}`;
 const paramsGcp = {
 Bucket: bucket,
-Key: this.currentTest.mpuKeyNameGcp,
+Key: testContext.currentTest.mpuKeyNameGcp,
 Metadata: { 'scal-location-constraint': gcpLocation },
 };
 const paramsMem = {
 Bucket: memBucketName,
-Key: this.currentTest.mpuKeyNameMem,
+Key: testContext.currentTest.mpuKeyNameMem,
 Metadata: { 'scal-location-constraint': memLocation },
 };
 const paramsAWS = {
 Bucket: memBucketName,
-Key: this.currentTest.mpuKeyNameAWS,
+Key: testContext.currentTest.mpuKeyNameAWS,
 Metadata: { 'scal-location-constraint': awsLocation },
 };
 async.waterfall([
@@ -158,26 +164,26 @@ describeSkipIfNotMultipleOrCeph('Put Copy Part to GCP', function describeFn() {
 err => next(err)),
 next => s3.putObject({
 Bucket: bucket,
-Key: this.currentTest.keyNameNormalGcp,
+Key: testContext.currentTest.keyNameNormalGcp,
 Body: normalBody,
 Metadata: { 'scal-location-constraint': gcpLocation },
 }, err => next(err)),
 next => s3.putObject({
 Bucket: bucket,
-Key: this.currentTest.keyNameNormalGcpMismatch,
+Key: testContext.currentTest.keyNameNormalGcpMismatch,
 Body: normalBody,
 Metadata: { 'scal-location-constraint':
 gcpLocationMismatch },
 }, err => next(err)),
 next => s3.putObject({
 Bucket: bucket,
-Key: this.currentTest.keyNameFiveMbGcp,
+Key: testContext.currentTest.keyNameFiveMbGcp,
 Body: fiveMbBody,
 Metadata: { 'scal-location-constraint': gcpLocation },
 }, err => next(err)),
 next => s3.putObject({
 Bucket: bucket,
-Key: this.currentTest.keyNameFiveMbMem,
+Key: testContext.currentTest.keyNameFiveMbMem,
 Body: fiveMbBody,
 Metadata: { 'scal-location-constraint': memLocation },
 }, err => next(err)),
@@ -185,41 +191,41 @@ describeSkipIfNotMultipleOrCeph('Put Copy Part to GCP', function describeFn() {
 (err, res) => {
 assert.ifError(err, 'createMultipartUpload ' +
 `on gcp: Expected success, got error: ${err}`);
-this.currentTest.uploadId = res.UploadId;
+testContext.currentTest.uploadId = res.UploadId;
 next();
 }),
 next => s3.createMultipartUpload(paramsMem,
 (err, res) => {
 assert.ifError(err, 'createMultipartUpload ' +
 `in memory: Expected success, got error: ${err}`);
-this.currentTest.uploadIdMem = res.UploadId;
+testContext.currentTest.uploadIdMem = res.UploadId;
 next();
 }),
 next => s3.createMultipartUpload(paramsAWS,
 (err, res) => {
 assert.ifError(err, 'createMultipartUpload ' +
 `on AWS: Expected success, got error: ${err}`);
-this.currentTest.uploadIdAWS = res.UploadId;
+testContext.currentTest.uploadIdAWS = res.UploadId;
 next();
 }),
 ], done);
 });

-afterEach(function afterFn(done) {
+afterEach(done => {
 const paramsGcp = {
 Bucket: bucket,
-Key: this.currentTest.mpuKeyNameGcp,
-UploadId: this.currentTest.uploadId,
+Key: testContext.currentTest.mpuKeyNameGcp,
+UploadId: testContext.currentTest.uploadId,
 };
 const paramsMem = {
 Bucket: memBucketName,
-Key: this.currentTest.mpuKeyNameMem,
-UploadId: this.currentTest.uploadIdMem,
+Key: testContext.currentTest.mpuKeyNameMem,
+UploadId: testContext.currentTest.uploadIdMem,
 };
 const paramsAWS = {
 Bucket: memBucketName,
-Key: this.currentTest.mpuKeyNameAWS,
-UploadId: this.currentTest.uploadIdAWS,
+Key: testContext.currentTest.mpuKeyNameAWS,
+UploadId: testContext.currentTest.uploadIdAWS,
 };
 async.waterfall([
 next => s3.abortMultipartUpload(paramsGcp,
@@ -231,28 +237,60 @@ describeSkipIfNotMultipleOrCeph('Put Copy Part to GCP', function describeFn() {
 ], done);
 });
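Note: Jest runs test callbacks as plain functions (and this codemod uses arrows), so Mocha's `this.currentTest` / `this.test` context is gone; per-test state moves onto a `testContext` object reset in a `beforeEach`. A minimal sketch of the pattern, assuming the context is seeded with the sub-objects the hooks dereference (the diff resets it to `{}`, which only works if `currentTest`/`test` are populated before first use):

    describe('example', () => {
        let testContext;

        beforeEach(() => {
            // assumed seeding of the sub-objects used below
            testContext = { currentTest: {}, test: {} };
        });

        beforeEach(done => {
            testContext.currentTest.uploadId = 'upload-id-from-setup';
            done();
        });

        test('reads state prepared in setup', done => {
            expect(testContext.currentTest.uploadId).toBe('upload-id-from-setup');
            done();
        });
    });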
-it('should copy small part from GCP to MPU with GCP location',
-function itFn(done) {
+test(
+'should copy small part from GCP to MPU with GCP location',
+done => {
 const params = {
 Bucket: bucket,
 CopySource:
-`${bucket}/${this.test.keyNameNormalGcp}`,
-Key: this.test.mpuKeyNameGcp,
+`${bucket}/${testContext.test.keyNameNormalGcp}`,
+Key: testContext.test.mpuKeyNameGcp,
 PartNumber: 1,
-UploadId: this.test.uploadId,
+UploadId: testContext.test.uploadId,
 };
 async.waterfall([
 next => s3.uploadPartCopy(params, (err, res) => {
 assert.ifError(err, 'uploadPartCopy: Expected ' +
 `success, got error: ${err}`);
-assert.strictEqual(res.ETag, `"${normalMD5}"`);
+expect(res.ETag).toBe(`"${normalMD5}"`);
 next(err);
 }),
 next => {
 const infos = {
 bucketName: bucket,
-keyName: this.test.mpuKeyNameGcp,
-uploadId: this.test.uploadId,
+keyName: testContext.test.mpuKeyNameGcp,
+uploadId: testContext.test.uploadId,
+md5: normalMD5,
+totalSize: normalBodySize,
+};
+assertCopyPart(infos, next);
+},
+], done);
+}
+);
+
+test('should copy small part from GCP with bucketMatch=false to ' +
+'MPU with GCP location', done => {
+const params = {
+Bucket: bucket,
+CopySource:
+`${bucket}/${testContext.test.keyNameNormalGcpMismatch}`,
+Key: testContext.test.mpuKeyNameGcp,
+PartNumber: 1,
+UploadId: testContext.test.uploadId,
+};
+async.waterfall([
+next => s3.uploadPartCopy(params, (err, res) => {
+assert.ifError(err, 'uploadPartCopy: Expected ' +
+`success, got error: ${err}`);
+expect(res.ETag).toBe(`"${normalMD5}"`);
+next(err);
+}),
+next => {
+const infos = {
+bucketName: bucket,
+keyName: testContext.test.mpuKeyNameGcp,
+uploadId: testContext.test.uploadId,
 md5: normalMD5,
 totalSize: normalBodySize,
 };
@@ -261,89 +299,61 @@ describeSkipIfNotMultipleOrCeph('Put Copy Part to GCP', function describeFn() {
 ], done);
 });

-it('should copy small part from GCP with bucketMatch=false to ' +
-'MPU with GCP location',
-function itFn(done) {
+test(
+'should copy 5 Mb part from GCP to MPU with GCP location',
+done => {
 const params = {
 Bucket: bucket,
 CopySource:
-`${bucket}/${this.test.keyNameNormalGcpMismatch}`,
-Key: this.test.mpuKeyNameGcp,
+`${bucket}/${testContext.test.keyNameFiveMbGcp}`,
+Key: testContext.test.mpuKeyNameGcp,
 PartNumber: 1,
-UploadId: this.test.uploadId,
+UploadId: testContext.test.uploadId,
 };
 async.waterfall([
 next => s3.uploadPartCopy(params, (err, res) => {
 assert.ifError(err, 'uploadPartCopy: Expected ' +
 `success, got error: ${err}`);
-assert.strictEqual(res.ETag, `"${normalMD5}"`);
+expect(res.ETag).toBe(`"${fiveMbMD5}"`);
 next(err);
 }),
 next => {
 const infos = {
 bucketName: bucket,
-keyName: this.test.mpuKeyNameGcp,
-uploadId: this.test.uploadId,
-md5: normalMD5,
-totalSize: normalBodySize,
-};
-assertCopyPart(infos, next);
-},
-], done);
-});
-
-it('should copy 5 Mb part from GCP to MPU with GCP location',
-function ifF(done) {
-const params = {
-Bucket: bucket,
-CopySource:
-`${bucket}/${this.test.keyNameFiveMbGcp}`,
-Key: this.test.mpuKeyNameGcp,
-PartNumber: 1,
-UploadId: this.test.uploadId,
-};
-async.waterfall([
-next => s3.uploadPartCopy(params, (err, res) => {
-assert.ifError(err, 'uploadPartCopy: Expected ' +
-`success, got error: ${err}`);
-assert.strictEqual(res.ETag, `"${fiveMbMD5}"`);
-next(err);
-}),
-next => {
-const infos = {
-bucketName: bucket,
-keyName: this.test.mpuKeyNameGcp,
-uploadId: this.test.uploadId,
+keyName: testContext.test.mpuKeyNameGcp,
+uploadId: testContext.test.uploadId,
 md5: fiveMbMD5,
 totalSize: fiveMB,
 };
 assertCopyPart(infos, next);
 },
 ], done);
-});
+}
+);

-it('should copy part from GCP to MPU with memory location',
-function ifF(done) {
+test(
+'should copy part from GCP to MPU with memory location',
+done => {
 const params = {
 Bucket: memBucketName,
 CopySource:
-`${bucket}/${this.test.keyNameNormalGcp}`,
-Key: this.test.mpuKeyNameMem,
+`${bucket}/${testContext.test.keyNameNormalGcp}`,
+Key: testContext.test.mpuKeyNameMem,
 PartNumber: 1,
-UploadId: this.test.uploadIdMem,
+UploadId: testContext.test.uploadIdMem,
 };
 async.waterfall([
 next => s3.uploadPartCopy(params, (err, res) => {
 assert.ifError(err, 'uploadPartCopy: Expected ' +
 `success, got error: ${err}`);
-assert.strictEqual(res.ETag, `"${normalMD5}"`);
+expect(res.ETag).toBe(`"${normalMD5}"`);
 next(err);
 }),
 next => {
 s3.listParts({
 Bucket: memBucketName,
-Key: this.test.mpuKeyNameMem,
-UploadId: this.test.uploadIdMem,
+Key: testContext.test.mpuKeyNameMem,
+UploadId: testContext.test.uploadIdMem,
 }, (err, res) => {
 assert.ifError(err,
 'listParts: Expected success,' +
@@ -351,8 +361,8 @@ describeSkipIfNotMultipleOrCeph('Put Copy Part to GCP', function describeFn() {
 const resultCopy =
 JSON.parse(JSON.stringify(result));
 resultCopy.Bucket = memBucketName;
-resultCopy.Key = this.test.mpuKeyNameMem;
-resultCopy.UploadId = this.test.uploadIdMem;
+resultCopy.Key = testContext.test.mpuKeyNameMem;
+resultCopy.UploadId = testContext.test.uploadIdMem;
 resultCopy.Parts =
 [{ PartNumber: 1,
 LastModified: res.Parts[0].LastModified,
@@ -363,23 +373,23 @@ describeSkipIfNotMultipleOrCeph('Put Copy Part to GCP', function describeFn() {
 });
 },
 ], done);
-});
+}
+);

-it('should copy part from GCP to MPU with AWS location',
-function ifF(done) {
+test('should copy part from GCP to MPU with AWS location', done => {
 const params = {
 Bucket: memBucketName,
 CopySource:
-`${bucket}/${this.test.keyNameNormalGcp}`,
-Key: this.test.mpuKeyNameAWS,
+`${bucket}/${testContext.test.keyNameNormalGcp}`,
+Key: testContext.test.mpuKeyNameAWS,
 PartNumber: 1,
-UploadId: this.test.uploadIdAWS,
+UploadId: testContext.test.uploadIdAWS,
 };
 async.waterfall([
 next => s3.uploadPartCopy(params, (err, res) => {
 assert.ifError(err, 'uploadPartCopy: Expected ' +
 `success, got error: ${err}`);
-assert.strictEqual(res.ETag, `"${normalMD5}"`);
+expect(res.ETag).toBe(`"${normalMD5}"`);
 next(err);
 }),
 next => {
@@ -388,45 +398,41 @@ describeSkipIfNotMultipleOrCeph('Put Copy Part to GCP', function describeFn() {
 .details.bucketName;
 awsS3.listParts({
 Bucket: awsBucket,
-Key: this.test.mpuKeyNameAWS,
-UploadId: this.test.uploadIdAWS,
+Key: testContext.test.mpuKeyNameAWS,
+UploadId: testContext.test.uploadIdAWS,
 }, (err, res) => {
 assert.ifError(err,
 'listParts: Expected success,' +
 ` got error: ${err}`);
-assert.strictEqual(res.Bucket, awsBucket);
-assert.strictEqual(res.Key,
-this.test.mpuKeyNameAWS);
-assert.strictEqual(res.UploadId,
-this.test.uploadIdAWS);
-assert.strictEqual(res.Parts.length, 1);
-assert.strictEqual(res.Parts[0].PartNumber, 1);
-assert.strictEqual(res.Parts[0].ETag,
-`"${normalMD5}"`);
-assert.strictEqual(res.Parts[0].Size,
-normalBodySize);
+expect(res.Bucket).toBe(awsBucket);
+expect(res.Key).toBe(testContext.test.mpuKeyNameAWS);
+expect(res.UploadId).toBe(testContext.test.uploadIdAWS);
+expect(res.Parts.length).toBe(1);
+expect(res.Parts[0].PartNumber).toBe(1);
+expect(res.Parts[0].ETag).toBe(`"${normalMD5}"`);
+expect(res.Parts[0].Size).toBe(normalBodySize);
 next();
 });
 },
 ], done);
 });

-it('should copy part from GCP object with range to MPU ' +
-'with AWS location', function ifF(done) {
+test('should copy part from GCP object with range to MPU ' +
+'with AWS location', done => {
 const params = {
 Bucket: memBucketName,
 CopySource:
-`${bucket}/${this.test.keyNameNormalGcp}`,
-Key: this.test.mpuKeyNameAWS,
+`${bucket}/${testContext.test.keyNameNormalGcp}`,
+Key: testContext.test.mpuKeyNameAWS,
 CopySourceRange: 'bytes=0-5',
 PartNumber: 1,
-UploadId: this.test.uploadIdAWS,
+UploadId: testContext.test.uploadIdAWS,
 };
 async.waterfall([
 next => s3.uploadPartCopy(params, (err, res) => {
 assert.ifError(err, 'uploadPartCopy: Expected ' +
 `success, got error: ${err}`);
-assert.strictEqual(res.ETag, `"${sixBytesMD5}"`);
+expect(res.ETag).toBe(`"${sixBytesMD5}"`);
 next(err);
 }),
 next => {
@@ -435,51 +441,47 @@ describeSkipIfNotMultipleOrCeph('Put Copy Part to GCP', function describeFn() {
 .details.bucketName;
 awsS3.listParts({
 Bucket: awsBucket,
-Key: this.test.mpuKeyNameAWS,
-UploadId: this.test.uploadIdAWS,
+Key: testContext.test.mpuKeyNameAWS,
+UploadId: testContext.test.uploadIdAWS,
 }, (err, res) => {
 assert.ifError(err,
 'listParts: Expected success,' +
 ` got error: ${err}`);
-assert.strictEqual(res.Bucket, awsBucket);
-assert.strictEqual(res.Key,
-this.test.mpuKeyNameAWS);
-assert.strictEqual(res.UploadId,
-this.test.uploadIdAWS);
-assert.strictEqual(res.Parts.length, 1);
-assert.strictEqual(res.Parts[0].PartNumber, 1);
-assert.strictEqual(res.Parts[0].ETag,
-`"${sixBytesMD5}"`);
-assert.strictEqual(res.Parts[0].Size, 6);
+expect(res.Bucket).toBe(awsBucket);
+expect(res.Key).toBe(testContext.test.mpuKeyNameAWS);
+expect(res.UploadId).toBe(testContext.test.uploadIdAWS);
+expect(res.Parts.length).toBe(1);
+expect(res.Parts[0].PartNumber).toBe(1);
+expect(res.Parts[0].ETag).toBe(`"${sixBytesMD5}"`);
+expect(res.Parts[0].Size).toBe(6);
 next();
 });
 },
 ], done);
 });

-it('should copy 5 Mb part from a memory location to MPU with ' +
-'GCP location',
-function ifF(done) {
+test('should copy 5 Mb part from a memory location to MPU with ' +
+'GCP location', done => {
 const params = {
 Bucket: bucket,
 CopySource:
-`${bucket}/${this.test.keyNameFiveMbMem}`,
-Key: this.test.mpuKeyNameGcp,
+`${bucket}/${testContext.test.keyNameFiveMbMem}`,
+Key: testContext.test.mpuKeyNameGcp,
 PartNumber: 1,
-UploadId: this.test.uploadId,
+UploadId: testContext.test.uploadId,
 };
 async.waterfall([
 next => s3.uploadPartCopy(params, (err, res) => {
 assert.ifError(err, 'uploadPartCopy: Expected ' +
 `success, got error: ${err}`);
-assert.strictEqual(res.ETag, `"${fiveMbMD5}"`);
+expect(res.ETag).toBe(`"${fiveMbMD5}"`);
 next(err);
 }),
 next => {
 const infos = {
 bucketName: bucket,
-keyName: this.test.mpuKeyNameGcp,
-uploadId: this.test.uploadId,
+keyName: testContext.test.mpuKeyNameGcp,
+uploadId: testContext.test.uploadId,
 md5: fiveMbMD5,
 totalSize: fiveMB,
 };
@@ -489,45 +491,45 @@ describeSkipIfNotMultipleOrCeph('Put Copy Part to GCP', function describeFn() {
 });

 describe('with existing part', () => {
-beforeEach(function beF(done) {
+beforeEach(done => {
 const params = {
 Body: oneKbBody,
 Bucket: bucket,
-Key: this.currentTest.mpuKeyNameGcp,
+Key: testContext.currentTest.mpuKeyNameGcp,
 PartNumber: 1,
-UploadId: this.currentTest.uploadId,
+UploadId: testContext.currentTest.uploadId,
 };
 s3.uploadPart(params, done);
 });
-it('should copy part from GCP to GCP with existing ' +
-'parts', function ifF(done) {
+test('should copy part from GCP to GCP with existing ' +
+'parts', done => {
 const resultCopy = JSON.parse(JSON.stringify(result));
 const params = {
 Bucket: bucket,
 CopySource:
-`${bucket}/${this.test.keyNameNormalGcp}`,
-Key: this.test.mpuKeyNameGcp,
+`${bucket}/${testContext.test.keyNameNormalGcp}`,
+Key: testContext.test.mpuKeyNameGcp,
 PartNumber: 2,
-UploadId: this.test.uploadId,
+UploadId: testContext.test.uploadId,
 };
 async.waterfall([
 next => s3.uploadPartCopy(params, (err, res) => {
 assert.ifError(err,
 'uploadPartCopy: Expected success, got ' +
 `error: ${err}`);
-assert.strictEqual(res.ETag, `"${normalMD5}"`);
+expect(res.ETag).toBe(`"${normalMD5}"`);
 next(err);
 }),
 next => s3.listParts({
 Bucket: bucket,
-Key: this.test.mpuKeyNameGcp,
-UploadId: this.test.uploadId,
+Key: testContext.test.mpuKeyNameGcp,
+UploadId: testContext.test.uploadId,
 }, (err, res) => {
 assert.ifError(err, 'listParts: Expected ' +
 `success, got error: ${err}`);
 resultCopy.Bucket = bucket;
-resultCopy.Key = this.test.mpuKeyNameGcp;
-resultCopy.UploadId = this.test.uploadId;
+resultCopy.Key = testContext.test.mpuKeyNameGcp;
+resultCopy.UploadId = testContext.test.uploadId;
 resultCopy.Parts =
 [{ PartNumber: 1,
 LastModified: res.Parts[0].LastModified,
@@ -543,15 +545,13 @@ describeSkipIfNotMultipleOrCeph('Put Copy Part to GCP', function describeFn() {
 }),
 next => gcpClient.listParts({
 Bucket: gcpBucketMPU,
-Key: this.test.mpuKeyNameGcp,
-UploadId: this.test.uploadId,
+Key: testContext.test.mpuKeyNameGcp,
+UploadId: testContext.test.uploadId,
 }, (err, res) => {
 assert.ifError(err, 'GCP listParts: Expected ' +
 `success, got error: ${err}`);
-assert.strictEqual(
-res.Contents[0].ETag, `"${oneKbMD5}"`);
-assert.strictEqual(
-res.Contents[1].ETag, `"${normalMD5}"`);
+expect(res.Contents[0].ETag).toBe(`"${oneKbMD5}"`);
+expect(res.Contents[1].ETag).toBe(`"${normalMD5}"`);
 next();
 }),
 ], done);
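Note: every case above drives `s3.uploadPartCopy`, where `CopySource` names the `<bucket>/<key>` of an existing object and the returned ETag is the quoted MD5 of the copied bytes. A hedged sketch of one call, with placeholder bucket, key, and id names (only `CopySourceRange` and the parameter shapes come from the diff itself):

    const params = {
        Bucket: 'dest-bucket',
        CopySource: 'src-bucket/src-key',
        Key: 'mpu-key',
        PartNumber: 1,
        UploadId: 'upload-id-from-createMultipartUpload',
        // CopySourceRange: 'bytes=0-5', // optional: copy only a byte range
    };
    s3.uploadPartCopy(params, (err, res) => {
        expect(err).toEqual(null);
        // as these tests read it, the response carries the part's quoted MD5
        expect(res.ETag).toBe('"<md5-of-copied-bytes>"');
    });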
@@ -592,15 +592,21 @@ function describeF() {
 });
 describe('Basic test with complete MPU from AWS to GCP location: ',
 () => {
-beforeEach(function beF(done) {
-this.currentTest.keyNameAws =
+let testContext;
+
+beforeEach(() => {
+testContext = {};
+});
+
+beforeEach(done => {
+testContext.currentTest.keyNameAws =
 `onehundredandfivembgcp${uniqName(keyObjectAWS)}`;
-this.currentTest.mpuKeyNameGcp =
+testContext.currentTest.mpuKeyNameGcp =
 `mpukeyname${uniqName(keyObjectGcp)}`;

 const createMpuParams = {
 Bucket: bucket,
-Key: this.currentTest.mpuKeyNameGcp,
+Key: testContext.currentTest.mpuKeyNameGcp,
 Metadata: { 'scal-location-constraint': gcpLocation },
 };
 async.waterfall([
@@ -610,57 +616,54 @@ function describeF() {
 err => next(err)),
 next => s3.putObject({
 Bucket: awsBucketName,
-Key: this.currentTest.keyNameAws,
+Key: testContext.currentTest.keyNameAws,
 Body: fiveMbBody,
 Metadata: { 'scal-location-constraint': awsLocation },
 }, err => next(err)),
 next => s3.createMultipartUpload(createMpuParams,
 (err, res) => {
-assert.equal(err, null, 'createMultipartUpload: ' +
-`Expected success, got error: ${err}`);
-this.currentTest.uploadId = res.UploadId;
+expect(err).toEqual(null);
+testContext.currentTest.uploadId = res.UploadId;
 next();
 }),
 ], done);
 });

-it('should copy two 5 MB part from GCP to MPU with GCP' +
-'location', function ifF(done) {
+test('should copy two 5 MB part from GCP to MPU with GCP' +
+'location', done => {
 const uploadParams = {
 Bucket: bucket,
 CopySource:
 `${awsBucketName}/` +
-`${this.test.keyNameAws}`,
-Key: this.test.mpuKeyNameGcp,
+`${testContext.test.keyNameAws}`,
+Key: testContext.test.mpuKeyNameGcp,
 PartNumber: 1,
-UploadId: this.test.uploadId,
+UploadId: testContext.test.uploadId,
 };
 const uploadParams2 = {
 Bucket: bucket,
 CopySource:
 `${awsBucketName}/` +
-`${this.test.keyNameAws}`,
-Key: this.test.mpuKeyNameGcp,
+`${testContext.test.keyNameAws}`,
+Key: testContext.test.mpuKeyNameGcp,
 PartNumber: 2,
-UploadId: this.test.uploadId,
+UploadId: testContext.test.uploadId,
 };
 async.waterfall([
 next => s3.uploadPartCopy(uploadParams, (err, res) => {
-assert.equal(err, null, 'uploadPartCopy: Expected ' +
-`success, got error: ${err}`);
-assert.strictEqual(res.ETag, `"${fiveMbMD5}"`);
+expect(err).toEqual(null);
+expect(res.ETag).toBe(`"${fiveMbMD5}"`);
 next(err);
 }),
 next => s3.uploadPartCopy(uploadParams2, (err, res) => {
-assert.equal(err, null, 'uploadPartCopy: Expected ' +
-`success, got error: ${err}`);
-assert.strictEqual(res.ETag, `"${fiveMbMD5}"`);
+expect(err).toEqual(null);
+expect(res.ETag).toBe(`"${fiveMbMD5}"`);
 next(err);
 }),
 next => {
 const completeMpuParams = {
 Bucket: bucket,
-Key: this.test.mpuKeyNameGcp,
+Key: testContext.test.mpuKeyNameGcp,
 MultipartUpload: {
 Parts: [
 {
@@ -673,15 +676,13 @@ function describeF() {
 },
 ],
 },
-UploadId: this.test.uploadId,
+UploadId: testContext.test.uploadId,
 };
 s3.completeMultipartUpload(completeMpuParams,
 (err, res) => {
-assert.equal(err, null, 'completeMultipartUpload:' +
-` Expected success, got error: ${err}`);
-assert.strictEqual(res.Bucket, bucket);
-assert.strictEqual(res.Key,
-this.test.mpuKeyNameGcp);
+expect(err).toEqual(null);
+expect(res.Bucket).toBe(bucket);
+expect(res.Key).toBe(testContext.test.mpuKeyNameGcp);
 next();
 });
 },
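Note: once both copied parts land, the test completes the MPU by listing each part's ETag and number. A hedged sketch of that final call (bucket, key, ids, and ETags are placeholders):

    const completeMpuParams = {
        Bucket: 'bucket',
        Key: 'mpu-key',
        MultipartUpload: {
            Parts: [
                { ETag: '"etag-part-1"', PartNumber: 1 },
                { ETag: '"etag-part-2"', PartNumber: 2 },
            ],
        },
        UploadId: 'upload-id',
    };
    s3.completeMultipartUpload(completeMpuParams, (err, res) => {
        expect(err).toEqual(null);
        expect(res.Bucket).toBe('bucket');
        expect(res.Key).toBe('mpu-key');
    });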
@@ -44,15 +44,11 @@ const tagObj = { key1: 'value1', key2: 'value2' };

 function getAndAssertObjectTags(tagParams, callback) {
 return s3.getObjectTagging(tagParams, (err, res) => {
-assert.strictEqual(res.TagSet.length, 2);
-assert.strictEqual(res.TagSet[0].Key,
-putTags.TagSet[0].Key);
-assert.strictEqual(res.TagSet[0].Value,
-putTags.TagSet[0].Value);
-assert.strictEqual(res.TagSet[1].Key,
-putTags.TagSet[1].Key);
-assert.strictEqual(res.TagSet[1].Value,
-putTags.TagSet[1].Value);
+expect(res.TagSet.length).toBe(2);
+expect(res.TagSet[0].Key).toBe(putTags.TagSet[0].Key);
+expect(res.TagSet[0].Value).toBe(putTags.TagSet[0].Value);
+expect(res.TagSet[1].Key).toBe(putTags.TagSet[1].Key);
+expect(res.TagSet[1].Value).toBe(putTags.TagSet[1].Value);
 return callback();
 });
 }
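Note: `getObjectTagging` returns tags as `{ TagSet: [{ Key, Value }, ...] }`, which is what the helper above compares entry by entry. A sketch of the fixture shape it assumes, with values taken from the `tagObj` shown in the hunk header:

    const putTags = {
        TagSet: [
            { Key: 'key1', Value: 'value1' },
            { Key: 'key2', Value: 'value2' },
        ],
    };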
@@ -61,18 +57,18 @@ function getAndAssertObjectTags(tagParams, callback) {
 function awsGet(key, tagCheck, isEmpty, isMpu, callback) {
 process.stdout.write('Getting object from AWS\n');
 getAwsRetry({ key }, 0, (err, res) => {
-assert.equal(err, null);
+expect(err).toEqual(null);
 if (isEmpty) {
-assert.strictEqual(res.ETag, `"${emptyMD5}"`);
+expect(res.ETag).toBe(`"${emptyMD5}"`);
 } else if (isMpu) {
-assert.strictEqual(res.ETag, `"${mpuMD5}"`);
+expect(res.ETag).toBe(`"${mpuMD5}"`);
 } else {
-assert.strictEqual(res.ETag, `"${correctMD5}"`);
+expect(res.ETag).toBe(`"${correctMD5}"`);
 }
 if (tagCheck) {
-assert.strictEqual(res.TagCount, '2');
+expect(res.TagCount).toBe('2');
 } else {
-assert.strictEqual(res.TagCount, undefined);
+expect(res.TagCount).toBe(undefined);
 }
 return callback();
 });
@@ -82,18 +78,17 @@ function azureGet(key, tagCheck, isEmpty, callback) {
 process.stdout.write('Getting object from Azure\n');
 azureClient.getBlobProperties(azureContainerName, key,
 (err, res) => {
-assert.equal(err, null);
+expect(err).toEqual(null);
 const resMD5 = convertMD5(res.contentSettings.contentMD5);
 if (isEmpty) {
-assert.strictEqual(resMD5, `${emptyMD5}`);
+expect(resMD5).toBe(`${emptyMD5}`);
 } else {
-assert.strictEqual(resMD5, `${correctMD5}`);
+expect(resMD5).toBe(`${correctMD5}`);
 }
 if (tagCheck) {
-assert.strictEqual(res.metadata.tags,
-JSON.stringify(tagObj));
+expect(res.metadata.tags).toBe(JSON.stringify(tagObj));
 } else {
-assert.strictEqual(res.metadata.tags, undefined);
+expect(res.metadata.tags).toBe(undefined);
 }
 return callback();
 });
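Note: Azure reports a blob's content MD5 base64-encoded, while the S3-style assertions here compare hex digests, hence the `convertMD5` call on `res.contentSettings.contentMD5`. A minimal sketch of such a helper; the suite's real implementation lives elsewhere and may differ:

    // base64 -> hex, e.g. '1B2M2Y8AsgTpgAmY7PhCfg==' -> 'd41d8cd98f00b204e9800998ecf8427e'
    const convertMD5 = b64 => Buffer.from(b64, 'base64').toString('hex');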
@@ -103,20 +98,19 @@ function getObject(key, backend, tagCheck, isEmpty, isMpu, callback) {
 function get(cb) {
 process.stdout.write('Getting object\n');
 s3.getObject({ Bucket: bucket, Key: key }, (err, res) => {
-assert.equal(err, null);
+expect(err).toEqual(null);
 if (isEmpty) {
-assert.strictEqual(res.ETag, `"${emptyMD5}"`);
+expect(res.ETag).toBe(`"${emptyMD5}"`);
 } else if (isMpu) {
-assert.strictEqual(res.ETag, `"${mpuMD5}"`);
+expect(res.ETag).toBe(`"${mpuMD5}"`);
 } else {
-assert.strictEqual(res.ETag, `"${correctMD5}"`);
+expect(res.ETag).toBe(`"${correctMD5}"`);
 }
-assert.strictEqual(res.Metadata['scal-location-constraint'],
-backend);
+expect(res.Metadata['scal-location-constraint']).toBe(backend);
 if (tagCheck) {
-assert.strictEqual(res.TagCount, '2');
+expect(res.TagCount).toBe('2');
 } else {
-assert.strictEqual(res.TagCount, undefined);
+expect(res.TagCount).toBe(undefined);
 }
 return cb();
 });
@@ -135,14 +129,14 @@ function getObject(key, backend, tagCheck, isEmpty, isMpu, callback) {
 function mpuWaterfall(params, cb) {
 async.waterfall([
 next => s3.createMultipartUpload(params, (err, data) => {
-assert.equal(err, null);
+expect(err).toEqual(null);
 next(null, data.UploadId);
 }),
 (uploadId, next) => {
 const partParams = { Bucket: bucket, Key: params.Key, PartNumber: 1,
 UploadId: uploadId, Body: body };
 s3.uploadPart(partParams, (err, result) => {
-assert.equal(err, null);
+expect(err).toEqual(null);
 next(null, uploadId, result.ETag);
 });
 },
@@ -153,12 +147,12 @@ function mpuWaterfall(params, cb) {
 },
 UploadId: uploadId };
 s3.completeMultipartUpload(compParams, err => {
-assert.equal(err, null);
+expect(err).toEqual(null);
 next();
 });
 },
 ], err => {
-assert.equal(err, null);
+expect(err).toEqual(null);
 cb();
 });
 }
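Note: `mpuWaterfall` threads the upload id and part ETag through its steps via `async.waterfall`, where each step's extra `next(...)` arguments become the next step's parameters. A self-contained sketch of that flow (ids and ETags are placeholders):

    const async = require('async');

    async.waterfall([
        next => next(null, 'upload-id'),                      // create MPU
        (uploadId, next) => next(null, uploadId, '"etag-1"'), // upload part
        (uploadId, etag, next) => next(null),                 // complete MPU
    ], err => {
        expect(err).toEqual(null);
    });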
@@ -197,21 +191,19 @@ function testSuite() {
 testBackends.forEach(backend => {
 const itSkipIfAzureOrCeph = backend === 'azurebackend' ||
 isCEPH ? it.skip : it;
-it(`should put an object with tags to ${backend} backend`,
-done => {
+test(`should put an object with tags to ${backend} backend`, done => {
 const key = `somekey-${genUniqID()}`;
 const params = Object.assign({ Key: key, Tagging: tagString,
 Metadata: { 'scal-location-constraint': backend } },
 putParams);
 process.stdout.write('Putting object\n');
 s3.putObject(params, err => {
-assert.equal(err, null);
+expect(err).toEqual(null);
 getObject(key, backend, true, false, false, done);
 });
 });

-it(`should put a 0 byte object with tags to ${backend} backend`,
-done => {
+test(`should put a 0 byte object with tags to ${backend} backend`, done => {
 const key = `somekey-${genUniqID()}`;
 const params = {
 Bucket: bucket,
@@ -221,30 +213,30 @@ function testSuite() {
 };
 process.stdout.write('Putting object\n');
 s3.putObject(params, err => {
-assert.equal(err, null);
+expect(err).toEqual(null);
 getObject(key, backend, true, true, false, done);
 });
 });

-it(`should put tags to preexisting object in ${backend} ` +
+test(`should put tags to preexisting object in ${backend} ` +
 'backend', done => {
 const key = `somekey-${genUniqID()}`;
 const params = Object.assign({ Key: key, Metadata:
 { 'scal-location-constraint': backend } }, putParams);
 process.stdout.write('Putting object\n');
 s3.putObject(params, err => {
-assert.equal(err, null);
+expect(err).toEqual(null);
 const putTagParams = { Bucket: bucket, Key: key,
 Tagging: putTags };
 process.stdout.write('Putting object tags\n');
 s3.putObjectTagging(putTagParams, err => {
-assert.equal(err, null);
+expect(err).toEqual(null);
 getObject(key, backend, true, false, false, done);
 });
 });
 });

-it('should put tags to preexisting 0 byte object in ' +
+test('should put tags to preexisting 0 byte object in ' +
 `${backend} backend`, done => {
 const key = `somekey-${genUniqID()}`;
 const params = {
@@ -254,12 +246,12 @@ function testSuite() {
 };
 process.stdout.write('Putting object\n');
 s3.putObject(params, err => {
-assert.equal(err, null);
+expect(err).toEqual(null);
 const putTagParams = { Bucket: bucket, Key: key,
 Tagging: putTags };
 process.stdout.write('Putting object tags\n');
 s3.putObjectTagging(putTagParams, err => {
-assert.equal(err, null);
+expect(err).toEqual(null);
 getObject(key, backend, true, true, false, done);
 });
 });
@@ -278,31 +270,30 @@ function testSuite() {
 Tagging: putTags };
 process.stdout.write('Putting object\n');
 s3.putObjectTagging(putTagParams, err => {
-assert.equal(err, null);
+expect(err).toEqual(null);
 getObject(key, backend, true, false, true, done);
 });
 });
 });
 });

-it('should not return error putting tags to correct object ' +
+test('should not return error putting tags to correct object ' +
 'version in AWS, even if a delete marker was created directly ' +
-'on AWS before tags are put',
-done => {
+'on AWS before tags are put', done => {
 const key = `somekey-${genUniqID()}`;
 const params = Object.assign({ Key: key, Metadata:
 { 'scal-location-constraint': awsLocation } }, putParams);
 process.stdout.write('Putting object\n');
 s3.putObject(params, err => {
-assert.equal(err, null);
+expect(err).toEqual(null);
 process.stdout.write('Deleting object from AWS\n');
 awsS3.deleteObject({ Bucket: awsBucket, Key: key }, err => {
-assert.equal(err, null);
+expect(err).toEqual(null);
 const putTagParams = { Bucket: bucket, Key: key,
 Tagging: putTags };
 process.stdout.write('Putting object tags\n');
 s3.putObjectTagging(putTagParams, err => {
-assert.strictEqual(err, null);
+expect(err).toBe(null);
 done();
 });
 });
@@ -312,22 +303,21 @@ function testSuite() {

 describe('getObjectTagging', () => {
 testBackends.forEach(backend => {
-it(`should get tags from object on ${backend} backend`,
-done => {
+test(`should get tags from object on ${backend} backend`, done => {
 const key = `somekey-${genUniqID()}`;
 const params = Object.assign({ Key: key, Tagging: tagString,
 Metadata: { 'scal-location-constraint': backend } },
 putParams);
 process.stdout.write('Putting object\n');
 s3.putObject(params, err => {
-assert.equal(err, null);
+expect(err).toEqual(null);
 const tagParams = { Bucket: bucket, Key: key };
 getAndAssertObjectTags(tagParams, done);
 });
 });
 });

-it('should not return error on getting tags from object that has ' +
+test('should not return error on getting tags from object that has ' +
 'had a delete marker put directly on AWS', done => {
 const key = `somekey-${genUniqID()}`;
 const params = Object.assign({ Key: key, Tagging: tagString,
@@ -335,10 +325,10 @@ function testSuite() {
 putParams);
 process.stdout.write('Putting object\n');
 s3.putObject(params, err => {
-assert.equal(err, null);
+expect(err).toEqual(null);
 process.stdout.write('Deleting object from AWS\n');
 awsS3.deleteObject({ Bucket: awsBucket, Key: key }, err => {
-assert.equal(err, null);
+expect(err).toEqual(null);
 const tagParams = { Bucket: bucket, Key: key };
 getAndAssertObjectTags(tagParams, done);
 });
@@ -348,25 +338,24 @@ function testSuite() {

 describe('deleteObjectTagging', () => {
 testBackends.forEach(backend => {
-it(`should delete tags from object on ${backend} backend`,
-done => {
+test(`should delete tags from object on ${backend} backend`, done => {
 const key = `somekey-${genUniqID()}`;
 const params = Object.assign({ Key: key, Tagging: tagString,
 Metadata: { 'scal-location-constraint': backend } },
 putParams);
 process.stdout.write('Putting object\n');
 s3.putObject(params, err => {
-assert.equal(err, null);
+expect(err).toEqual(null);
 const tagParams = { Bucket: bucket, Key: key };
 s3.deleteObjectTagging(tagParams, err => {
-assert.equal(err, null);
+expect(err).toEqual(null);
 getObject(key, backend, false, false, false, done);
 });
 });
 });
 });

-it('should not return error on deleting tags from object that ' +
+test('should not return error on deleting tags from object that ' +
 'has had delete markers put directly on AWS', done => {
 const key = `somekey-${genUniqID()}`;
 const params = Object.assign({ Key: key, Tagging: tagString,
@@ -374,13 +363,13 @@ function testSuite() {
 putParams);
 process.stdout.write('Putting object\n');
 s3.putObject(params, err => {
-assert.equal(err, null);
+expect(err).toEqual(null);
 process.stdout.write('Deleting object from AWS\n');
 awsS3.deleteObject({ Bucket: awsBucket, Key: key }, err => {
-assert.equal(err, null);
+expect(err).toEqual(null);
 const tagParams = { Bucket: bucket, Key: key };
 s3.deleteObjectTagging(tagParams, err => {
-assert.strictEqual(err, null);
+expect(err).toBe(null);
 done();
 });
 });
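Note: Jest keeps `it`/`test` (and `it.skip`/`test.skip`) as aliases, which is why the `itSkipIfAzureOrCeph` selector above survives the migration unchanged. A sketch of the conditional-skip pattern, with assumed flag values:

    const backend = 'azurebackend'; // assumed
    const isCEPH = false;           // assumed
    const itSkipIfAzureOrCeph = (backend === 'azurebackend' || isCEPH)
        ? test.skip
        : test;

    itSkipIfAzureOrCeph('runs only where the backend supports it', done => {
        done();
    });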
@ -43,7 +43,7 @@ function testSuite() {
|
||||||
});
|
});
|
||||||
});
|
});
|
||||||
|
|
||||||
it('versioning not configured: should delete a tag set on the ' +
|
test('versioning not configured: should delete a tag set on the ' +
|
||||||
'latest version if no version is specified', done => {
|
'latest version if no version is specified', done => {
|
||||||
const key = `somekey-${genUniqID()}`;
|
const key = `somekey-${genUniqID()}`;
|
||||||
async.waterfall([
|
async.waterfall([
|
||||||
|
@ -56,7 +56,7 @@ function testSuite() {
|
||||||
], done);
|
], done);
|
||||||
});
|
});
|
||||||
|
|
||||||
it('versioning not configured: should delete a tag set on the ' +
|
test('versioning not configured: should delete a tag set on the ' +
|
||||||
'version if specified (null)', done => {
|
'version if specified (null)', done => {
|
||||||
const key = `somekey-${genUniqID()}`;
|
const key = `somekey-${genUniqID()}`;
|
||||||
async.waterfall([
|
async.waterfall([
|
||||||
|
@@ -69,7 +69,7 @@ function testSuite() {
            ], done);
        });

-        it('versioning suspended: should delete a tag set on the latest ' +
+        test('versioning suspended: should delete a tag set on the latest ' +
        'version if no version is specified', done => {
            const data = [undefined, 'test1', 'test2'];
            const key = `somekey-${genUniqID()}`;
@@ -83,7 +83,7 @@ function testSuite() {
            ], done);
        });

-        it('versioning suspended: should delete a tag set on a specific ' +
+        test('versioning suspended: should delete a tag set on a specific ' +
        'version (null)', done => {
            const key = `somekey-${genUniqID()}`;
            async.waterfall([
@@ -98,7 +98,7 @@ function testSuite() {
            ], done);
        });

-        it('versioning enabled then suspended: should delete a tag set on ' +
+        test('versioning enabled then suspended: should delete a tag set on ' +
        'a specific (non-null) version if specified', done => {
            const key = `somekey-${genUniqID()}`;
            async.waterfall([
@@ -119,7 +119,7 @@ function testSuite() {
            ], done);
        });

-        it('versioning enabled: should delete a tag set on the latest ' +
+        test('versioning enabled: should delete a tag set on the latest ' +
        'version if no version is specified', done => {
            const key = `somekey-${genUniqID()}`;
            async.waterfall([
@@ -133,7 +133,8 @@ function testSuite() {
            ], done);
        });

-        it('versioning enabled: should delete a tag set on a specific version',
+        test(
+            'versioning enabled: should delete a tag set on a specific version',
        done => {
            const key = `somekey-${genUniqID()}`;
            async.waterfall([
@@ -146,9 +147,10 @@ function testSuite() {
                    versionId, expectedVersionId: versionId }, next),
                next => awsGetAssertTags({ key, expectedTags: {} }, next),
            ], done);
-        });
+        }
+        );

-        it('versioning enabled: should delete a tag set on a specific ' +
+        test('versioning enabled: should delete a tag set on a specific ' +
        'version that is not the latest version', done => {
            const key = `somekey-${genUniqID()}`;
            async.waterfall([
@@ -171,7 +173,7 @@ function testSuite() {
            ], done);
        });

-        it('versioning suspended then enabled: should delete a tag set on ' +
+        test('versioning suspended then enabled: should delete a tag set on ' +
        'a specific version (null) if specified', done => {
            const key = `somekey-${genUniqID()}`;
            async.waterfall([
@@ -191,9 +193,8 @@ function testSuite() {
            ], done);
        });

-        it('should return an ServiceUnavailable if trying to delete ' +
-        'tags from object that was deleted from AWS directly',
-        done => {
+        test('should return an ServiceUnavailable if trying to delete ' +
+        'tags from object that was deleted from AWS directly', done => {
            const key = `somekey-${genUniqID()}`;
            async.waterfall([
                next => s3.putObject({ Bucket: bucket, Key: key }, next),
@@ -205,9 +206,8 @@ function testSuite() {
            ], done);
        });

-        it('should return an ServiceUnavailable if trying to delete ' +
-        'tags from object that was deleted from AWS directly',
-        done => {
+        test('should return an ServiceUnavailable if trying to delete ' +
+        'tags from object that was deleted from AWS directly', done => {
            const key = `somekey-${genUniqID()}`;
            async.waterfall([
                next => s3.putObject({ Bucket: bucket, Key: key }, next),
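The hunks above all apply one mechanical rename: Mocha's `it(...)` becomes Jest's `test(...)`, and where a title string spilled onto extra lines, the `done` parameter is folded back onto the title line. In Jest, `test` and `it` are aliases and callback-style tests still work, so the change is cosmetic. A minimal sketch of the pattern (the names `s3`, `bucket` and `key` stand in for the fixtures these files define):

    // Before (Mocha):
    //   it('deletes a tag set', done => { ... });
    // After (Jest) -- `test` is an alias of `it`; the `done` callback
    // style is still supported:
    test('deletes a tag set', done => {
        s3.deleteObjectTagging({ Bucket: bucket, Key: key }, err => {
            expect(err).toBe(null);
            done();
        });
    });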
@@ -45,7 +45,7 @@ function testSuite() {
            });
        });

-        it('versioning not configured: should put/get a tag set on the ' +
+        test('versioning not configured: should put/get a tag set on the ' +
        'latest version if no version is specified', done => {
            const key = `somekey-${genUniqID()}`;
            async.waterfall([
@@ -59,7 +59,7 @@ function testSuite() {
            ], done);
        });

-        it('versioning not configured: should put/get a tag set on a ' +
+        test('versioning not configured: should put/get a tag set on a ' +
        'specific version if specified (null)', done => {
            const key = `somekey-${genUniqID()}`;
            async.waterfall([
@@ -74,7 +74,7 @@ function testSuite() {
            ], done);
        });

-        it('versioning suspended: should put/get a tag set on the latest ' +
+        test('versioning suspended: should put/get a tag set on the latest ' +
        'version if no version is specified', done => {
            const data = [undefined, 'test1', 'test2'];
            const key = `somekey-${genUniqID()}`;
@@ -89,7 +89,7 @@ function testSuite() {
            ], done);
        });

-        it('versioning suspended: should put/get a tag set on a specific ' +
+        test('versioning suspended: should put/get a tag set on a specific ' +
        'version (null)', done => {
            const key = `somekey-${genUniqID()}`;
            async.waterfall([
@@ -105,7 +105,7 @@ function testSuite() {
            ], done);
        });

-        it('versioning enabled then suspended: should put/get a tag set on ' +
+        test('versioning enabled then suspended: should put/get a tag set on ' +
        'a specific (non-null) version if specified', done => {
            const key = `somekey-${genUniqID()}`;
            async.waterfall([
@@ -126,7 +126,7 @@ function testSuite() {
            ], done);
        });

-        it('versioning enabled: should put/get a tag set on the latest ' +
+        test('versioning enabled: should put/get a tag set on the latest ' +
        'version if no version is specified', done => {
            const key = `somekey-${genUniqID()}`;
            async.waterfall([
@@ -141,7 +141,8 @@ function testSuite() {
            ], done);
        });

-        it('versioning enabled: should put/get a tag set on a specific version',
+        test(
+            'versioning enabled: should put/get a tag set on a specific version',
        done => {
            const key = `somekey-${genUniqID()}`;
            async.waterfall([
@@ -156,9 +157,11 @@ function testSuite() {
                (versionId, next) => awsGetAssertTags({ key,
                    expectedTags: tags }, next),
            ], done);
-        });
+        }
+        );

-        it('versioning enabled: should put/get a tag set on a specific version',
+        test(
+            'versioning enabled: should put/get a tag set on a specific version',
        done => {
            const key = `somekey-${genUniqID()}`;
            async.waterfall([
@@ -171,9 +174,10 @@ function testSuite() {
                    versionId, expectedVersionId: versionId }, next),
                next => awsGetAssertTags({ key, expectedTags: {} }, next),
            ], done);
-        });
+        }
+        );

-        it('versioning enabled: should put/get a tag set on a specific ' +
+        test('versioning enabled: should put/get a tag set on a specific ' +
        'version that is not the latest version', done => {
            const key = `somekey-${genUniqID()}`;
            async.waterfall([
@@ -197,7 +201,7 @@ function testSuite() {
        });


-        it('versioning suspended then enabled: should put/get a tag set on ' +
+        test('versioning suspended then enabled: should put/get a tag set on ' +
        'a specific version (null) if specified', done => {
            const key = `somekey-${genUniqID()}`;
            async.waterfall([
@@ -217,9 +221,8 @@ function testSuite() {
            ], done);
        });

-        it('should get tags for an object even if it was deleted from ' +
-        'AWS directly (we rely on s3 metadata)',
-        done => {
+        test('should get tags for an object even if it was deleted from ' +
+        'AWS directly (we rely on s3 metadata)', done => {
            const key = `somekey-${genUniqID()}`;
            async.waterfall([
                next => s3.putObject({ Bucket: bucket, Key: key }, next),
@@ -234,9 +237,8 @@ function testSuite() {
            ], done);
        });

-        it('should return an ServiceUnavailable if trying to put ' +
-        'tags from object that was deleted from AWS directly',
-        done => {
+        test('should return an ServiceUnavailable if trying to put ' +
+        'tags from object that was deleted from AWS directly', done => {
            const key = `somekey-${genUniqID()}`;
            async.waterfall([
                next => s3.putObject({ Bucket: bucket, Key: key }, next),
@@ -248,9 +250,8 @@ function testSuite() {
            ], done);
        });

-        it('should get tags for an version even if it was deleted from ' +
-        'AWS directly (we rely on s3 metadata)',
-        done => {
+        test('should get tags for an version even if it was deleted from ' +
+        'AWS directly (we rely on s3 metadata)', done => {
            const key = `somekey-${genUniqID()}`;
            async.waterfall([
                next => enableVersioning(s3, bucket, next),
@@ -268,9 +269,8 @@ function testSuite() {
            ], done);
        });

-        it('should return an ServiceUnavailable if trying to put ' +
-        'tags on version that was deleted from AWS directly',
-        done => {
+        test('should return an ServiceUnavailable if trying to put ' +
+        'tags on version that was deleted from AWS directly', done => {
            const key = `somekey-${genUniqID()}`;
            async.waterfall([
                next => s3.putObject({ Bucket: bucket, Key: key }, next),
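The same handful of assertion rewrites repeats through both tagging files. Below is a self-contained sketch of the `assert`-to-`expect` mappings this diff uses; the custom failure messages the `assert` calls carried are dropped, since Jest prints its own diff on failure:

    const assert = require('assert');

    test('assert-to-expect mappings used in this migration', () => {
        const err = null;
        const code = 'NoSuchKey';

        assert.strictEqual(err, null);          // before
        expect(err).toBe(null);                 // after: Object.is equality

        assert.notStrictEqual(code, undefined); // before
        expect(code).not.toBe(undefined);       // after

        assert.equal(err, null);                // before: loose ==
        expect(err).toEqual(null);              // after: recursive equality

        assert(code);                           // before: truthiness
        expect(code).toBeTruthy();              // after
    });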
@@ -27,28 +27,24 @@ const retryTimeout = 10000;

 function getAwsSuccess(key, awsMD5, location, cb) {
     return getAwsRetry({ key }, 0, (err, res) => {
-        assert.strictEqual(err, null, 'Expected success, got error ' +
-            `on direct AWS call: ${err}`);
+        expect(err).toBe(null);
         if (location === awsLocationEncryption) {
             // doesn't check ETag because it's different
             // with every PUT with encryption
-            assert.strictEqual(res.ServerSideEncryption, 'AES256');
+            expect(res.ServerSideEncryption).toBe('AES256');
         }
         if (process.env.ENABLE_KMS_ENCRYPTION !== 'true') {
-            assert.strictEqual(res.ETag, `"${awsMD5}"`);
+            expect(res.ETag).toBe(`"${awsMD5}"`);
         }
-        assert.strictEqual(res.Metadata['scal-location-constraint'],
-            location);
+        expect(res.Metadata['scal-location-constraint']).toBe(location);
         return cb(res);
     });
 }

 function getAwsError(key, expectedError, cb) {
     return getAwsRetry({ key }, 0, err => {
-        assert.notStrictEqual(err, undefined,
-            'Expected error but did not find one');
-        assert.strictEqual(err.code, expectedError,
-            `Expected error code ${expectedError} but got ${err.code}`);
+        expect(err).not.toBe(undefined);
+        expect(err.code).toBe(expectedError);
         cb();
     });
 }
@@ -63,17 +59,15 @@ function awsGetCheck(objectKey, s3MD5, awsMD5, location, cb) {
                 s3.getObject({ Bucket: bucket, Key: objectKey }, s3GetCallback);
             }, retryTimeout);
         }
-        assert.strictEqual(err, null, 'Expected success, got error ' +
-            `on call to AWS through S3: ${err}`);
-        assert.strictEqual(res.ETag, `"${s3MD5}"`);
-        assert.strictEqual(res.Metadata['scal-location-constraint'],
-            location);
+        expect(err).toBe(null);
+        expect(res.ETag).toBe(`"${s3MD5}"`);
+        expect(res.Metadata['scal-location-constraint']).toBe(location);
         process.stdout.write('Getting object from AWS\n');
         return getAwsSuccess(objectKey, awsMD5, location, cb);
     });
 }

-describe('MultipleBackend put object', function testSuite() {
+describe('MultipleBackend put object', () => {
     this.timeout(250000);
     withV4(sigCfg => {
         beforeEach(() => {
@@ -103,16 +97,17 @@ describe('MultipleBackend put object', function testSuite() {
             });
         });

-        it('should return an error to put request without a valid bucket name',
+        test(
+            'should return an error to put request without a valid bucket name',
        done => {
            const key = `somekey-${genUniqID()}`;
            s3.putObject({ Bucket: '', Key: key }, err => {
-                assert.notEqual(err, null,
-                    'Expected failure but got success');
-                assert.strictEqual(err.code, 'MethodNotAllowed');
+                expect(err).not.toEqual(null);
+                expect(err.code).toBe('MethodNotAllowed');
                done();
            });
-        });
+        }
+        );

        describeSkipIfNotMultiple('with set location from "x-amz-meta-scal-' +
        'location-constraint" header', function describe() {
@@ -120,117 +115,106 @@ describe('MultipleBackend put object', function testSuite() {
                this.retries(2);
            }

-            it('should return an error to put request without a valid ' +
+            test('should return an error to put request without a valid ' +
            'location constraint', done => {
                const key = `somekey-${genUniqID()}`;
                const params = { Bucket: bucket, Key: key,
                    Body: body,
                    Metadata: { 'scal-location-constraint': 'fail-region' } };
                s3.putObject(params, err => {
-                    assert.notEqual(err, null, 'Expected failure but got ' +
-                        'success');
-                    assert.strictEqual(err.code, 'InvalidArgument');
+                    expect(err).not.toEqual(null);
+                    expect(err.code).toBe('InvalidArgument');
                    done();
                });
            });

-            it('should put an object to mem', done => {
+            test('should put an object to mem', done => {
                const key = `somekey-${genUniqID()}`;
                const params = { Bucket: bucket, Key: key,
                    Body: body,
                    Metadata: { 'scal-location-constraint': memLocation },
                };
                s3.putObject(params, err => {
-                    assert.equal(err, null, 'Expected success, ' +
-                        `got error ${err}`);
+                    expect(err).toEqual(null);
                    s3.getObject({ Bucket: bucket, Key: key }, (err, res) => {
-                        assert.strictEqual(err, null, 'Expected success, ' +
-                            `got error ${err}`);
-                        assert.strictEqual(res.ETag, `"${correctMD5}"`);
+                        expect(err).toBe(null);
+                        expect(res.ETag).toBe(`"${correctMD5}"`);
                        done();
                    });
                });
            });

-            it('should put a 0-byte object to mem', done => {
+            test('should put a 0-byte object to mem', done => {
                const key = `somekey-${genUniqID()}`;
                const params = { Bucket: bucket, Key: key,
                    Metadata: { 'scal-location-constraint': memLocation },
                };
                s3.putObject(params, err => {
-                    assert.equal(err, null, 'Expected success, ' +
-                        `got error ${err}`);
+                    expect(err).toEqual(null);
                    s3.getObject({ Bucket: bucket, Key: key },
                    (err, res) => {
-                        assert.strictEqual(err, null, 'Expected success, ' +
-                            `got error ${err}`);
-                        assert.strictEqual(res.ETag, `"${emptyMD5}"`);
+                        expect(err).toBe(null);
+                        expect(res.ETag).toBe(`"${emptyMD5}"`);
                        done();
                    });
                });
            });

-            it('should put a 0-byte object to AWS', done => {
+            test('should put a 0-byte object to AWS', done => {
                const key = `somekey-${genUniqID()}`;
                const params = { Bucket: bucket, Key: key,
                    Metadata: { 'scal-location-constraint': awsLocation },
                };
                return s3.putObject(params, err => {
-                    assert.equal(err, null, 'Expected success, ' +
-                        `got error ${err}`);
+                    expect(err).toEqual(null);
                    return awsGetCheck(key, emptyMD5, emptyMD5, awsLocation,
                        () => done());
                });
            });

-            it('should put an object to file', done => {
+            test('should put an object to file', done => {
                const key = `somekey-${genUniqID()}`;
                const params = { Bucket: bucket, Key: key,
                    Body: body,
                    Metadata: { 'scal-location-constraint': fileLocation },
                };
                s3.putObject(params, err => {
-                    assert.equal(err, null, 'Expected success, ' +
-                        `got error ${err}`);
+                    expect(err).toEqual(null);
                    s3.getObject({ Bucket: bucket, Key: key }, (err, res) => {
-                        assert.strictEqual(err, null, 'Expected success, ' +
-                            `got error ${err}`);
-                        assert.strictEqual(res.ETag, `"${correctMD5}"`);
+                        expect(err).toBe(null);
+                        expect(res.ETag).toBe(`"${correctMD5}"`);
                        done();
                    });
                });
            });

-            it('should put an object to AWS', done => {
+            test('should put an object to AWS', done => {
                const key = `somekey-${genUniqID()}`;
                const params = { Bucket: bucket, Key: key,
                    Body: body,
                    Metadata: { 'scal-location-constraint': awsLocation } };
                return s3.putObject(params, err => {
-                    assert.equal(err, null, 'Expected success, ' +
-                        `got error ${err}`);
+                    expect(err).toEqual(null);
                    return awsGetCheck(key, correctMD5, correctMD5, awsLocation,
                        () => done());
                });
            });

-            it('should encrypt body only if bucket encrypted putting ' +
-            'object to AWS',
-            done => {
+            test('should encrypt body only if bucket encrypted putting ' +
+            'object to AWS', done => {
                const key = `somekey-${genUniqID()}`;
                const params = { Bucket: bucket, Key: key,
                    Body: body,
                    Metadata: { 'scal-location-constraint': awsLocation } };
                return s3.putObject(params, err => {
-                    assert.equal(err, null, 'Expected success, ' +
-                        `got error ${err}`);
+                    expect(err).toEqual(null);
                    return getAwsSuccess(key, correctMD5, awsLocation,
                        () => done());
                });
            });


-            it('should put an object to AWS with encryption', done => {
+            test('should put an object to AWS with encryption', done => {
                // Test refuses to skip using itSkipCeph so just mark it passed
                if (isCEPH) {
                    return done();
@@ -241,14 +225,13 @@ describe('MultipleBackend put object', function testSuite() {
                    Metadata: { 'scal-location-constraint':
                        awsLocationEncryption } };
                return s3.putObject(params, err => {
-                    assert.equal(err, null, 'Expected success, ' +
-                        `got error ${err}`);
+                    expect(err).toEqual(null);
                    return awsGetCheck(key, correctMD5, correctMD5,
                        awsLocationEncryption, () => done());
                });
            });

-            it('should return a version id putting object to ' +
+            test('should return a version id putting object to ' +
            'to AWS with versioning enabled', done => {
                const key = `somekey-${genUniqID()}`;
                const params = { Bucket: bucket, Key: key, Body: body,
@@ -259,9 +242,8 @@ describe('MultipleBackend put object', function testSuite() {
                        VersioningConfiguration: versioningEnabled,
                    }, err => next(err)),
                    next => s3.putObject(params, (err, res) => {
-                        assert.strictEqual(err, null, 'Expected success ' +
-                            `putting object, got error ${err}`);
-                        assert(res.VersionId);
+                        expect(err).toBe(null);
+                        expect(res.VersionId).toBeTruthy();
                        next(null, res.ETag);
                    }),
                    (eTag, next) => getAwsSuccess(key, correctMD5, awsLocation,
@@ -269,67 +251,59 @@ describe('MultipleBackend put object', function testSuite() {
                ], done);
            });

-            it('should put a large object to AWS', done => {
+            test('should put a large object to AWS', done => {
                const key = `somekey-${genUniqID()}`;
                const params = { Bucket: bucket, Key: key,
                    Body: bigBody,
                    Metadata: { 'scal-location-constraint': awsLocation } };
                return s3.putObject(params, err => {
-                    assert.equal(err, null, 'Expected sucess, ' +
-                        `got error ${err}`);
+                    expect(err).toEqual(null);
                    return awsGetCheck(key, bigS3MD5, bigAWSMD5, awsLocation,
                        () => done());
                });
            });

-            it('should put objects with same key to AWS ' +
+            test('should put objects with same key to AWS ' +
            'then file, and object should only be present in file', done => {
                const key = `somekey-${genUniqID()}`;
                const params = { Bucket: bucket, Key: key,
                    Body: body,
                    Metadata: { 'scal-location-constraint': awsLocation } };
                return s3.putObject(params, err => {
-                    assert.equal(err, null, 'Expected success, ' +
-                        `got error ${err}`);
+                    expect(err).toEqual(null);
                    params.Metadata =
                        { 'scal-location-constraint': fileLocation };
                    return s3.putObject(params, err => {
-                        assert.equal(err, null, 'Expected success, ' +
-                            `got error ${err}`);
+                        expect(err).toEqual(null);
                        return s3.getObject({ Bucket: bucket, Key: key },
                        (err, res) => {
-                            assert.equal(err, null, 'Expected success, ' +
-                                `got error ${err}`);
-                            assert.strictEqual(
-                                res.Metadata['scal-location-constraint'],
-                                fileLocation);
+                            expect(err).toEqual(null);
+                            expect(res.Metadata['scal-location-constraint']).toBe(fileLocation);
                            return getAwsError(key, 'NoSuchKey', done);
                        });
                    });
                });
            });

-            it('should put objects with same key to file ' +
+            test('should put objects with same key to file ' +
            'then AWS, and object should only be present on AWS', done => {
                const key = `somekey-${genUniqID()}`;
                const params = { Bucket: bucket, Key: key,
                    Body: body,
                    Metadata: { 'scal-location-constraint': fileLocation } };
                return s3.putObject(params, err => {
-                    assert.equal(err, null, 'Expected success, ' +
-                        `got error ${err}`);
+                    expect(err).toEqual(null);
                    params.Metadata = {
                        'scal-location-constraint': awsLocation };
                    return s3.putObject(params, err => {
-                        assert.equal(err, null, 'Expected success, ' +
-                            `got error ${err}`);
+                        expect(err).toEqual(null);
                        return awsGetCheck(key, correctMD5, correctMD5,
                            awsLocation, () => done());
                    });
                });
            });

-            it('should put two objects to AWS with same ' +
+            test('should put two objects to AWS with same ' +
            'key, and newest object should be returned', done => {
                const key = `somekey-${genUniqID()}`;
                const params = { Bucket: bucket, Key: key,
@@ -337,17 +311,15 @@ describe('MultipleBackend put object', function testSuite() {
                    Metadata: { 'scal-location-constraint': awsLocation,
                        'unique-header': 'first object' } };
                return s3.putObject(params, err => {
-                    assert.equal(err, null, 'Expected success, ' +
-                        `got error ${err}`);
+                    expect(err).toEqual(null);
                    params.Metadata = { 'scal-location-constraint': awsLocation,
                        'unique-header': 'second object' };
                    return s3.putObject(params, err => {
-                        assert.equal(err, null, 'Expected success, ' +
-                            `got error ${err}`);
+                        expect(err).toEqual(null);
                        return awsGetCheck(key, correctMD5, correctMD5,
                            awsLocation, result => {
-                                assert.strictEqual(result.Metadata
-                                    ['unique-header'], 'second object');
+                                expect(result.Metadata
+                                    ['unique-header']).toBe('second object');
                                done();
                            });
                    });
@@ -378,69 +350,63 @@ describeSkipIfNotMultiple('MultipleBackend put object based on bucket location',
            });
        });

-        it('should put an object to mem with no location header',
-        done => {
+        test('should put an object to mem with no location header', done => {
            process.stdout.write('Creating bucket\n');
            return s3.createBucket({ Bucket: bucket,
                CreateBucketConfiguration: {
                    LocationConstraint: memLocation,
                },
            }, err => {
-                assert.equal(err, null, `Error creating bucket: ${err}`);
+                expect(err).toEqual(null);
                process.stdout.write('Putting object\n');
                const key = `somekey-${genUniqID()}`;
                const params = { Bucket: bucket, Key: key, Body: body };
                return s3.putObject(params, err => {
-                    assert.equal(err, null, 'Expected success, ' +
-                        `got error ${JSON.stringify(err)}`);
+                    expect(err).toEqual(null);
                    s3.getObject({ Bucket: bucket, Key: key }, (err, res) => {
-                        assert.strictEqual(err, null, 'Expected success, ' +
-                            `got error ${JSON.stringify(err)}`);
-                        assert.strictEqual(res.ETag, `"${correctMD5}"`);
+                        expect(err).toBe(null);
+                        expect(res.ETag).toBe(`"${correctMD5}"`);
                        done();
                    });
                });
            });
        });

-        it('should put an object to file with no location header', done => {
+        test('should put an object to file with no location header', done => {
            process.stdout.write('Creating bucket\n');
            return s3.createBucket({ Bucket: bucket,
                CreateBucketConfiguration: {
                    LocationConstraint: fileLocation,
                },
            }, err => {
-                assert.equal(err, null, `Error creating bucket: ${err}`);
+                expect(err).toEqual(null);
                process.stdout.write('Putting object\n');
                const key = `somekey-${genUniqID()}`;
                const params = { Bucket: bucket, Key: key, Body: body };
                return s3.putObject(params, err => {
-                    assert.equal(err, null, 'Expected success, ' +
-                        `got error ${JSON.stringify(err)}`);
+                    expect(err).toEqual(null);
                    s3.getObject({ Bucket: bucket, Key: key }, (err, res) => {
-                        assert.strictEqual(err, null, 'Expected success, ' +
-                            `got error ${JSON.stringify(err)}`);
-                        assert.strictEqual(res.ETag, `"${correctMD5}"`);
+                        expect(err).toBe(null);
+                        expect(res.ETag).toBe(`"${correctMD5}"`);
                        done();
                    });
                });
            });
        });

-        it('should put an object to AWS with no location header', done => {
+        test('should put an object to AWS with no location header', done => {
            process.stdout.write('Creating bucket\n');
            return s3.createBucket({ Bucket: bucket,
                CreateBucketConfiguration: {
                    LocationConstraint: awsLocation,
                },
            }, err => {
-                assert.equal(err, null, `Error creating bucket: ${err}`);
+                expect(err).toEqual(null);
                process.stdout.write('Putting object\n');
                const key = `somekey-${genUniqID()}`;
                const params = { Bucket: bucket, Key: key, Body: body };
                return s3.putObject(params, err => {
-                    assert.equal(err, null,
-                        `Expected success, got error ${err}`);
+                    expect(err).toEqual(null);
                    return awsGetCheck(key, correctMD5, correctMD5, undefined,
                        () => done());
                });
@@ -451,11 +417,11 @@ describeSkipIfNotMultiple('MultipleBackend put object based on bucket location',

 describe('MultipleBackend put based on request endpoint', () => {
     withV4(sigCfg => {
-        before(() => {
+        beforeAll(() => {
            bucketUtil = new BucketUtility('default', sigCfg);
            s3 = bucketUtil.s3;
        });
-        after(() => {
+        afterAll(() => {
            process.stdout.write('Emptying bucket\n');
            return bucketUtil.empty(bucket)
            .then(() => {
@@ -468,18 +434,17 @@ describe('MultipleBackend put based on request endpoint', () => {
            });
        });

-        it('should create bucket in corresponding backend', done => {
+        test('should create bucket in corresponding backend', done => {
            process.stdout.write('Creating bucket');
            const request = s3.createBucket({ Bucket: bucket });
            request.on('build', () => {
                request.httpRequest.body = '';
            });
            request.send(err => {
-                assert.strictEqual(err, null, `Error creating bucket: ${err}`);
+                expect(err).toBe(null);
                const key = `somekey-${genUniqID()}`;
                s3.putObject({ Bucket: bucket, Key: key, Body: body }, err => {
-                    assert.strictEqual(err, null, 'Expected succes, ' +
-                        `got error ${JSON.stringify(err)}`);
+                    expect(err).toBe(null);
                    const host = request.service.endpoint.hostname;
                    let endpoint = config.restEndpoints[host];
                    // s3 returns '' for us-east-1
@@ -487,14 +452,12 @@ describe('MultipleBackend put based on request endpoint', () => {
                        endpoint = '';
                    }
                    s3.getBucketLocation({ Bucket: bucket }, (err, data) => {
-                        assert.strictEqual(err, null, 'Expected succes, ' +
-                            `got error ${JSON.stringify(err)}`);
-                        assert.strictEqual(data.LocationConstraint, endpoint);
+                        expect(err).toBe(null);
+                        expect(data.LocationConstraint).toBe(endpoint);
                        s3.getObject({ Bucket: bucket, Key: key },
                        (err, res) => {
-                            assert.strictEqual(err, null, 'Expected succes, ' +
-                                `got error ${JSON.stringify(err)}`);
-                            assert.strictEqual(res.ETag, `"${correctMD5}"`);
+                            expect(err).toBe(null);
+                            expect(res.ETag).toBe(`"${correctMD5}"`);
                            done();
                        });
                    });
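One wrinkle in the hunk at `-63,17` above: the suite wrapper changes from `function testSuite()` to an arrow function while the body keeps Mocha's `this.timeout(250000)`, and the nested describes keep `this.retries(2)`. Jest has no `this`-based suite API, so under Jest those calls would need file-level replacements. A sketch of the usual substitutes, which are not part of this diff:

    // Hypothetical Jest equivalents for the Mocha suite options that the
    // migrated file still references; not part of the diff above.
    jest.setTimeout(250000); // replaces Mocha's this.timeout(250000)
    jest.retryTimes(2);      // replaces this.retries(2); requires the
                             // jest-circus runner (Jest 23+)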
@@ -37,10 +37,9 @@ let s3;
 function azureGetCheck(objectKey, azureMD5, azureMetadata, cb) {
     azureClient.getBlobProperties(azureContainerName, objectKey,
     (err, res) => {
-        assert.strictEqual(err, null, 'Expected success, got error ' +
-            `on call to Azure: ${err}`);
+        expect(err).toBe(null);
         const resMD5 = convertMD5(res.contentSettings.contentMD5);
-        assert.strictEqual(resMD5, azureMD5);
+        expect(resMD5).toBe(azureMD5);
         assert.deepStrictEqual(res.metadata, azureMetadata);
         return cb();
     });
@@ -50,7 +49,7 @@ describeSkipIfNotMultipleOrCeph('MultipleBackend put object to AZURE', function
 describeF() {
     this.timeout(250000);
     withV4(sigCfg => {
-        beforeEach(function beforeEachF() {
+        beforeEach(() => {
            this.currentTest.keyName = uniqName(keyObject);
            bucketUtil = new BucketUtility('default', sigCfg);
            s3 = bucketUtil.s3;
@@ -69,6 +68,12 @@ describeF() {
            });
        });
        describe('with bucket location header', () => {
+            let testContext;
+
+            beforeEach(() => {
+                testContext = {};
+            });
+
            beforeEach(done =>
                s3.createBucket({ Bucket: azureContainerName,
                    CreateBucketConfiguration: {
@@ -76,7 +81,7 @@ describeF() {
                    },
                }, done));

-            it('should return a NotImplemented error if try to put ' +
+            test('should return a NotImplemented error if try to put ' +
            'versioning to bucket with Azure location', done => {
                const params = {
                    Bucket: azureContainerName,
@@ -85,28 +90,34 @@ describeF() {
                    },
                };
                s3.putBucketVersioning(params, err => {
-                    assert.strictEqual(err.code, 'NotImplemented');
+                    expect(err.code).toBe('NotImplemented');
                    done();
                });
            });

-            it('should put an object to Azure, with no object location ' +
-            'header, based on bucket location', function it(done) {
+            test('should put an object to Azure, with no object location ' +
+            'header, based on bucket location', done => {
                const params = {
                    Bucket: azureContainerName,
-                    Key: this.test.keyName,
+                    Key: testContext.test.keyName,
                    Body: normalBody,
                };
                async.waterfall([
                    next => s3.putObject(params, err => setTimeout(() =>
                        next(err), azureTimeout)),
-                    next => azureGetCheck(this.test.keyName, normalMD5, {},
+                    next => azureGetCheck(testContext.test.keyName, normalMD5, {},
                        next),
                ], done);
            });
        });

        describe('with no bucket location header', () => {
+            let testContext;
+
+            beforeEach(() => {
+                testContext = {};
+            });
+
            beforeEach(() =>
                s3.createBucketAsync({ Bucket: azureContainerName })
                .catch(err => {
@@ -115,30 +126,29 @@ describeF() {
                }));

            keys.forEach(key => {
-                it(`should put a ${key.describe} object to Azure`,
-                function itF(done) {
+                test(`should put a ${key.describe} object to Azure`, done => {
                    const params = {
                        Bucket: azureContainerName,
-                        Key: this.test.keyName,
+                        Key: testContext.test.keyName,
                        Metadata: { 'scal-location-constraint': azureLocation },
                        Body: key.body,
                    };
                    s3.putObject(params, err => {
-                        assert.equal(err, null, 'Expected success, ' +
-                            `got error ${err}`);
+                        expect(err).toEqual(null);
                        setTimeout(() =>
-                            azureGetCheck(this.test.keyName,
+                            azureGetCheck(testContext.test.keyName,
                                key.MD5, azureMetadata,
                                () => done()), azureTimeout);
                    });
                });
            });

-            it('should put a object to Azure location with bucketMatch=false',
-            function itF(done) {
+            test(
+                'should put a object to Azure location with bucketMatch=false',
+                done => {
                const params = {
                    Bucket: azureContainerName,
-                    Key: this.test.keyName,
+                    Key: testContext.test.keyName,
                    Metadata: { 'scal-location-constraint':
                        azureLocationMismatch },
                    Body: normalBody,
@@ -149,17 +159,17 @@ describeF() {
                /* eslint-enable camelcase */
                };
                s3.putObject(params, err => {
-                    assert.equal(err, null, 'Expected success, ' +
-                        `got error ${err}`);
+                    expect(err).toEqual(null);
                    setTimeout(() =>
                        azureGetCheck(
-                            `${azureContainerName}/${this.test.keyName}`,
+                            `${azureContainerName}/${testContext.test.keyName}`,
                            normalMD5, azureMetadataMismatch,
                            () => done()), azureTimeout);
                });
-            });
+            }
+            );

-            it('should return error ServiceUnavailable putting an invalid ' +
+            test('should return error ServiceUnavailable putting an invalid ' +
            'key name to Azure', done => {
                const params = {
                    Bucket: azureContainerName,
@@ -168,36 +178,35 @@ describeF() {
                    Body: normalBody,
                };
                s3.putObject(params, err => {
-                    assert.strictEqual(err.code, 'ServiceUnavailable');
+                    expect(err.code).toBe('ServiceUnavailable');
                    done();
                });
            });

-            it('should return error NotImplemented putting a ' +
-            'version to Azure', function itF(done) {
+            test('should return error NotImplemented putting a ' +
+            'version to Azure', done => {
                s3.putBucketVersioning({
                    Bucket: azureContainerName,
                    VersioningConfiguration: versioningEnabled,
                }, err => {
-                    assert.equal(err, null, 'Expected success, ' +
-                        `got error ${err}`);
+                    expect(err).toEqual(null);
                    const params = { Bucket: azureContainerName,
-                        Key: this.test.keyName,
+                        Key: testContext.test.keyName,
                        Body: normalBody,
                        Metadata: { 'scal-location-constraint':
                            azureLocation } };
                    s3.putObject(params, err => {
-                        assert.strictEqual(err.code, 'NotImplemented');
+                        expect(err.code).toBe('NotImplemented');
                        done();
                    });
                });
            });

-            it('should put two objects to Azure with same ' +
-            'key, and newest object should be returned', function itF(done) {
+            test('should put two objects to Azure with same ' +
+            'key, and newest object should be returned', done => {
                const params = {
                    Bucket: azureContainerName,
-                    Key: this.test.keyName,
+                    Key: testContext.test.keyName,
                    Metadata: { 'scal-location-constraint': azureLocation },
                };
                async.waterfall([
@@ -209,19 +218,18 @@ describeF() {
                    },
                    next => {
                        setTimeout(() => {
-                            azureGetCheck(this.test.keyName, normalMD5,
+                            azureGetCheck(testContext.test.keyName, normalMD5,
                                azureMetadata, next);
                        }, azureTimeout);
                    },
                ], done);
            });

-            it('should put objects with same key to Azure ' +
-            'then file, and object should only be present in file', function
-            itF(done) {
+            test('should put objects with same key to Azure ' +
+            'then file, and object should only be present in file', done => {
                const params = {
                    Bucket: azureContainerName,
-                    Key: this.test.keyName,
+                    Key: testContext.test.keyName,
                    Body: normalBody,
                    Metadata: { 'scal-location-constraint': azureLocation } };
                async.waterfall([
@@ -234,28 +242,24 @@ describeF() {
                    },
                    next => s3.getObject({
                        Bucket: azureContainerName,
-                        Key: this.test.keyName,
+                        Key: testContext.test.keyName,
                    }, (err, res) => {
-                        assert.equal(err, null, 'Expected success, ' +
-                            `got error ${err}`);
-                        assert.strictEqual(
-                            res.Metadata['scal-location-constraint'],
-                            fileLocation);
+                        expect(err).toEqual(null);
+                        expect(res.Metadata['scal-location-constraint']).toBe(fileLocation);
                        next();
                    }),
                    next => azureClient.getBlobProperties(azureContainerName,
-                        this.test.keyName, err => {
-                            assert.strictEqual(err.code, 'NotFound');
+                        testContext.test.keyName, err => {
+                            expect(err.code).toBe('NotFound');
                            next();
                        }),
                ], done);
            });

-            it('should put objects with same key to file ' +
-            'then Azure, and object should only be present on Azure',
-            function itF(done) {
+            test('should put objects with same key to file ' +
+            'then Azure, and object should only be present on Azure', done => {
                const params = { Bucket: azureContainerName, Key:
-                    this.test.keyName,
+                    testContext.test.keyName,
                    Body: normalBody,
                    Metadata: { 'scal-location-constraint': fileLocation } };
                async.waterfall([
@@ -267,42 +271,42 @@ describeF() {
                        s3.putObject(params, err => setTimeout(() =>
                            next(err), azureTimeout));
                    },
-                    next => azureGetCheck(this.test.keyName, normalMD5,
+                    next => azureGetCheck(testContext.test.keyName, normalMD5,
                        azureMetadata, next),
                ], done);
            });

            describe('with ongoing MPU with same key name', () => {
-                beforeEach(function beFn(done) {
+                beforeEach(done => {
                    s3.createMultipartUpload({
                        Bucket: azureContainerName,
-                        Key: this.currentTest.keyName,
+                        Key: testContext.currentTest.keyName,
                        Metadata: { 'scal-location-constraint': azureLocation },
                    }, (err, res) => {
-                        assert.equal(err, null, `Err creating MPU: ${err}`);
-                        this.currentTest.uploadId = res.UploadId;
+                        expect(err).toEqual(null);
+                        testContext.currentTest.uploadId = res.UploadId;
                        done();
                    });
                });

-                afterEach(function afFn(done) {
+                afterEach(done => {
                    s3.abortMultipartUpload({
                        Bucket: azureContainerName,
-                        Key: this.currentTest.keyName,
-                        UploadId: this.currentTest.uploadId,
+                        Key: testContext.currentTest.keyName,
+                        UploadId: testContext.currentTest.uploadId,
                    }, err => {
-                        assert.equal(err, null, `Err aborting MPU: ${err}`);
+                        expect(err).toEqual(null);
                        done();
                    });
                });

-                it('should return ServiceUnavailable', function itFn(done) {
+                test('should return ServiceUnavailable', done => {
                    s3.putObject({
                        Bucket: azureContainerName,
-                        Key: this.test.keyName,
+                        Key: testContext.test.keyName,
                        Metadata: { 'scal-location-constraint': azureLocation },
                    }, err => {
-                        assert.strictEqual(err.code, 'ServiceUnavailable');
+                        expect(err.code).toBe('ServiceUnavailable');
                        done();
                    });
                });
            });
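In the Azure file the migration also swaps Mocha's implicit test context (`this.test`, `this.currentTest`) for a module-scoped `testContext` that a new `beforeEach` resets to `{}`. The diff only shows the reset, not what populates the `testContext.currentTest.keyName` that later hunks read, so the sketch below fills that wiring in as an assumption, reusing the `uniqName`/`keyObject` helpers this file already uses:

    // Sketch of the shared-context pattern replacing Mocha's `this`.
    let testContext;

    beforeEach(() => {
        testContext = {};
        // Assumed wiring: replaces `this.currentTest.keyName = uniqName(keyObject)`
        testContext.currentTest = { keyName: uniqName(keyObject) };
        // Mocha exposes the same object as `this.test` inside tests and
        // `this.currentTest` inside hooks, so alias both names here.
        testContext.test = testContext.currentTest;
    });

    test('uses the shared context instead of Mocha `this`', done => {
        s3.putObject({
            Bucket: azureContainerName,
            Key: testContext.test.keyName, // was: this.test.keyName
            Body: normalBody,
        }, err => {
            expect(err).toBe(null);
            done();
        });
    });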
@@ -23,15 +23,13 @@ function checkGcp(key, gcpMD5, location, callback) {
         Bucket: gcpBucket,
         Key: key,
     }, (err, res) => {
-        assert.equal(err, null, `Expected success, got error ${err}`);
+        expect(err).toEqual(null);
         if (res.Metadata && res.Metadata['scal-etag']) {
-            assert.strictEqual(res.Metadata['scal-etag'], gcpMD5);
+            expect(res.Metadata['scal-etag']).toBe(gcpMD5);
         } else {
-            assert.strictEqual(
-                res.ETag.substring(1, res.ETag.length - 1), gcpMD5);
+            expect(res.ETag.substring(1, res.ETag.length - 1)).toBe(gcpMD5);
         }
-        assert.strictEqual(res.Metadata['scal-location-constraint'],
-            location);
+        expect(res.Metadata['scal-location-constraint']).toBe(location);
         callback(res);
     });
 }
@@ -42,10 +40,8 @@ function checkGcpError(key, expectedError, callback) {
         Bucket: gcpBucket,
         Key: key,
     }, err => {
-        assert.notStrictEqual(err, undefined,
-            'Expected error but did not find one');
-        assert.strictEqual(err.code, expectedError,
-            `Expected error code ${expectedError} but got ${err.code}`);
+        expect(err).not.toBe(undefined);
+        expect(err.code).toBe(expectedError);
         callback();
     });
 }, 1000);
@@ -61,11 +57,9 @@ function gcpGetCheck(objectKey, s3MD5, gcpMD5, location, callback) {
                 s3.getObject({ Bucket: bucket, Key: objectKey }, s3GetCallback);
             }, retryTimeout);
         }
-        assert.strictEqual(err, null, 'Expected success, got error ' +
-            `on call to GCP through S3: ${err}`);
-        assert.strictEqual(res.ETag, `"${s3MD5}"`);
-        assert.strictEqual(res.Metadata['scal-location-constraint'],
-            location);
+        expect(err).toBe(null);
+        expect(res.ETag).toBe(`"${s3MD5}"`);
+        expect(res.Metadata['scal-location-constraint']).toBe(location);
         process.stdout.write('Getting object from GCP\n');
         return checkGcp(objectKey, gcpMD5, location, callback);
     });
@@ -99,7 +93,7 @@ describeFn() {
    });

    describe('with set location from "x-amz-meta-scal-' +
-    'location-constraint" header', function describe() {
+    'location-constraint" header', () => {
        if (!process.env.S3_END_TO_END) {
            this.retries(2);
        }
@@ -124,14 +118,13 @@ describeFn() {
        putTests.forEach(test => {
            const { location, Body } = test.input;
            const { s3MD5, gcpMD5 } = test.output;
-            it(test.msg, done => {
+            test(test.msg, done => {
                const key = `somekey-${genUniqID()}`;
                const params = { Bucket: bucket, Key: key, Body,
                    Metadata: { 'scal-location-constraint': location },
                };
                return s3.putObject(params, err => {
-                    assert.equal(err, null, 'Expected success, ' +
-                        `got error ${err}`);
+                    expect(err).toEqual(null);
                    return gcpGetCheck(key, s3MD5, gcpMD5, location,
                        () => done());
                });
@@ -139,32 +132,27 @@ describeFn() {
        });
    });

-    describe('with object rewrites', function describe() {
+    describe('with object rewrites', () => {
        if (!process.env.S3_END_TO_END) {
            this.retries(2);
        }

-        it('should put objects with same key to GCP ' +
+        test('should put objects with same key to GCP ' +
        'then file, and object should only be present in file', done => {
            const key = `somekey-${genUniqID()}`;
            const params = { Bucket: bucket, Key: key,
                Body: body,
                Metadata: { 'scal-location-constraint': gcpLocation } };
            return s3.putObject(params, err => {
-                assert.equal(err, null, 'Expected success, ' +
-                    `got error ${err}`);
+                expect(err).toEqual(null);
                params.Metadata =
                    { 'scal-location-constraint': fileLocation };
                return s3.putObject(params, err => {
-                    assert.equal(err, null, 'Expected success, ' +
-                        `got error ${err}`);
+                    expect(err).toEqual(null);
                    return s3.getObject({ Bucket: bucket, Key: key },
                    (err, res) => {
-                        assert.equal(err, null, 'Expected success, ' +
-                            `got error ${err}`);
-                        assert.strictEqual(
-                            res.Metadata['scal-location-constraint'],
-                            fileLocation);
+                        expect(err).toEqual(null);
+                        expect(res.Metadata['scal-location-constraint']).toBe(fileLocation);
                        return checkGcpError(key, 'NoSuchKey',
                            () => done());
                    });
@@ -172,27 +160,25 @@ describeFn() {
            });
        });

-        it('should put objects with same key to file ' +
+        test('should put objects with same key to file ' +
        'then GCP, and object should only be present on GCP', done => {
            const key = `somekey-${genUniqID()}`;
            const params = { Bucket: bucket, Key: key,
                Body: body,
                Metadata: { 'scal-location-constraint': fileLocation } };
            return s3.putObject(params, err => {
-                assert.equal(err, null, 'Expected success, ' +
-                    `got error ${err}`);
+                expect(err).toEqual(null);
                params.Metadata = {
                    'scal-location-constraint': gcpLocation };
                return s3.putObject(params, err => {
-                    assert.equal(err, null, 'Expected success, ' +
-                        `got error ${err}`);
+                    expect(err).toEqual(null);
                    return gcpGetCheck(key, correctMD5, correctMD5,
                        gcpLocation, () => done());
                });
            });
        });

-        it('should put two objects to GCP with same ' +
+        test('should put two objects to GCP with same ' +
        'key, and newest object should be returned', done => {
            const key = `somekey-${genUniqID()}`;
            const params = { Bucket: bucket, Key: key,
@@ -200,17 +186,15 @@ describeFn() {
                Metadata: { 'scal-location-constraint': gcpLocation,
                    'unique-header': 'first object' } };
            return s3.putObject(params, err => {
-                assert.equal(err, null, 'Expected success, ' +
-                    `got error ${err}`);
+                expect(err).toEqual(null);
                params.Metadata = { 'scal-location-constraint': gcpLocation,
                    'unique-header': 'second object' };
                return s3.putObject(params, err => {
-                    assert.equal(err, null, 'Expected success, ' +
-                        `got error ${err}`);
+                    expect(err).toEqual(null);
                    return gcpGetCheck(key, correctMD5, correctMD5,
                        gcpLocation, result => {
|
gcpLocation, result => {
|
||||||
assert.strictEqual(result.Metadata
|
expect(result.Metadata
|
||||||
['unique-header'], 'second object');
|
['unique-header']).toBe('second object');
|
||||||
done();
|
done();
|
||||||
});
|
});
|
||||||
});
|
});
|
||||||
|
@ -241,20 +225,19 @@ describeSkipIfNotMultipleOrCeph('MultipleBackend put object' +
|
||||||
});
|
});
|
||||||
});
|
});
|
||||||
|
|
||||||
it('should put an object to GCP with no location header', done => {
|
test('should put an object to GCP with no location header', done => {
|
||||||
process.stdout.write('Creating bucket\n');
|
process.stdout.write('Creating bucket\n');
|
||||||
return s3.createBucket({ Bucket: bucket,
|
return s3.createBucket({ Bucket: bucket,
|
||||||
CreateBucketConfiguration: {
|
CreateBucketConfiguration: {
|
||||||
LocationConstraint: gcpLocation,
|
LocationConstraint: gcpLocation,
|
||||||
},
|
},
|
||||||
}, err => {
|
}, err => {
|
||||||
assert.equal(err, null, `Error creating bucket: ${err}`);
|
expect(err).toEqual(null);
|
||||||
process.stdout.write('Putting object\n');
|
process.stdout.write('Putting object\n');
|
||||||
const key = `somekey-${genUniqID()}`;
|
const key = `somekey-${genUniqID()}`;
|
||||||
const params = { Bucket: bucket, Key: key, Body: body };
|
const params = { Bucket: bucket, Key: key, Body: body };
|
||||||
return s3.putObject(params, err => {
|
return s3.putObject(params, err => {
|
||||||
assert.equal(err, null,
|
expect(err).toEqual(null);
|
||||||
`Expected success, got error ${err}`);
|
|
||||||
return gcpGetCheck(key, correctMD5, correctMD5, undefined,
|
return gcpGetCheck(key, correctMD5, correctMD5, undefined,
|
||||||
() => done());
|
() => done());
|
||||||
});
|
});
|
||||||
|
|
|
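Everything in this branch is the same mechanical assert-to-expect translation, so the mapping is worth stating once. A minimal sketch of the correspondence, assuming Jest's `expect`/`test` globals; these pairs are illustrative, not lines from the changeset, and the free-form failure messages the `assert` calls carried have no slot in `expect` and are simply dropped:

    // Node assert (Mocha era)        ->  Jest matcher
    assert.strictEqual(a, b);         //  expect(a).toBe(b)           (=== equality)
    assert.deepStrictEqual(a, b);     //  expect(a).toEqual(b)        (recursive equality)
    assert.equal(err, null, 'msg');   //  expect(err).toEqual(null)   (message dropped; assert.equal
                                      //  used ==, so it also accepted undefined)
    assert(v, 'msg');                 //  expect(v).toBeTruthy()      (message dropped)
    assert.notEqual(v, null);         //  expect(v).not.toEqual(null)

One carry-over worth flagging: the `describe(..., () => {` rewrites above keep `this.retries(2)` in the body, a Mocha-only API with no `this` under an arrow function; if these suites run under Jest, `jest.retryTimes(2)` would be the rough equivalent.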
@@ -13,7 +13,7 @@ let s3;

 describe('Requests to ip endpoint not in config', () => {
     withV4(sigCfg => {
-        before(() => {
+        beforeAll(() => {
             bucketUtil = new BucketUtility('default', sigCfg);
             // change endpoint to endpoint with ip address
             // not in config

@@ -21,7 +21,7 @@ describe('Requests to ip endpoint not in config', () => {
             s3 = bucketUtil.s3;
         });

-        after(() => {
+        afterAll(() => {
             process.stdout.write('Emptying bucket\n');
             return bucketUtil.empty(bucket)
                 .then(() => {

@@ -34,10 +34,9 @@ describe('Requests to ip endpoint not in config', () => {
             });
         });

-        it('should accept put bucket request ' +
+        test('should accept put bucket request ' +
             'to IP address endpoint that is not in config using ' +
-            'path style',
-            done => {
+            'path style', done => {
             s3.createBucket({ Bucket: bucket }, err => {
                 assert.ifError(err);
                 done();

@@ -56,16 +55,15 @@ describe('Requests to ip endpoint not in config', () => {
                 (err, res) => {
                     assert.ifError(err);
                     // us-east-1 is returned as empty string
-                    assert.strictEqual(res
-                        .LocationConstraint, '');
+                    expect(res
+                        .LocationConstraint).toBe('');
                     done();
                 });
         });

-        it('should accept put object request ' +
+        test('should accept put object request ' +
             'to IP address endpoint that is not in config using ' +
-            'path style and use the bucket location for the object',
-            done => {
+            'path style and use the bucket location for the object', done => {
             s3.putObject({ Bucket: bucket, Key: key, Body: body },
                 err => {
                     assert.ifError(err);

@@ -77,14 +75,13 @@ describe('Requests to ip endpoint not in config', () => {
                 });
         });

-        it('should accept get object request ' +
+        test('should accept get object request ' +
             'to IP address endpoint that is not in config using ' +
-            'path style',
-            done => {
+            'path style', done => {
             s3.getObject({ Bucket: bucket, Key: key },
                 (err, res) => {
                     assert.ifError(err);
-                    assert.strictEqual(res.ETag, expectedETag);
+                    expect(res.ETag).toBe(expectedETag);
                     done();
                 });
         });
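The hook renames in this file follow a fixed table. A sketch of the Mocha-to-Jest lifecycle mapping, assuming Jest globals; `beforeEach`/`afterEach` keep their names, so only the one-time hooks change:

    // Mocha       ->  Jest
    // before      ->  beforeAll
    // after       ->  afterAll
    // beforeEach  ->  beforeEach (unchanged)
    // afterEach   ->  afterEach  (unchanged)
    beforeAll(() => {
        bucketUtil = new BucketUtility('default', sigCfg); // one-time suite setup
    });
    afterAll(() => bucketUtil.empty(bucket)); // a returned promise is awaited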
@@ -60,12 +60,12 @@ if (config.backends.data === 'multiple') {

 function _assertErrorResult(err, expectedError, desc) {
     if (!expectedError) {
-        assert.strictEqual(err, null, `got error for ${desc}: ${err}`);
+        expect(err).toBe(null);
         return;
     }
-    assert(err, `expected ${expectedError} but found no error`);
-    assert.strictEqual(err.code, expectedError);
-    assert.strictEqual(err.statusCode, errors[expectedError].code);
+    expect(err).toBeTruthy();
+    expect(err.code).toBe(expectedError);
+    expect(err.statusCode).toBe(errors[expectedError].code);
 }

 const utils = {

@@ -213,8 +213,7 @@ utils.putToAwsBackend = (s3, bucket, key, body, cb) => {
 utils.enableVersioning = (s3, bucket, cb) => {
     s3.putBucketVersioning({ Bucket: bucket,
         VersioningConfiguration: versioningEnabled }, err => {
-        assert.strictEqual(err, null, 'Expected success ' +
-            `enabling versioning, got error ${err}`);
+        expect(err).toBe(null);
         cb();
     });
 };

@@ -222,8 +221,7 @@ utils.enableVersioning = (s3, bucket, cb) => {
 utils.suspendVersioning = (s3, bucket, cb) => {
     s3.putBucketVersioning({ Bucket: bucket,
         VersioningConfiguration: versioningSuspended }, err => {
-        assert.strictEqual(err, null, 'Expected success ' +
-            `enabling versioning, got error ${err}`);
+        expect(err).toBe(null);
         cb();
     });
 };

@@ -232,8 +230,7 @@ utils.mapToAwsPuts = (s3, bucket, key, dataArray, cb) => {
     async.mapSeries(dataArray, (data, next) => {
         utils.putToAwsBackend(s3, bucket, key, data, next);
     }, (err, results) => {
-        assert.strictEqual(err, null, 'Expected success ' +
-            `putting object, got error ${err}`);
+        expect(err).toBe(null);
         cb(null, results);
     });
 };

@@ -259,27 +256,23 @@ utils.getAndAssertResult = (s3, params, cb) => {
         if (expectedError) {
             return cb();
         }
-        assert.strictEqual(err, null, 'Expected success ' +
-            `getting object, got error ${err}`);
+        expect(err).toBe(null);
         if (body) {
-            assert(data.Body, 'expected object body in response');
-            assert.equal(data.Body.length, data.ContentLength,
-                `received data of length ${data.Body.length} does not ` +
-                'equal expected based on ' +
-                `content length header of ${data.ContentLength}`);
+            expect(data.Body).toBeTruthy();
+            expect(data.Body.length).toEqual(data.ContentLength);
             const expectedMD5 = utils.expectedETag(body, false);
             const resultMD5 = utils.expectedETag(data.Body, false);
-            assert.strictEqual(resultMD5, expectedMD5);
+            expect(resultMD5).toBe(expectedMD5);
         }
         if (!expectedVersionId) {
-            assert.strictEqual(data.VersionId, undefined);
+            expect(data.VersionId).toBe(undefined);
         } else {
-            assert.strictEqual(data.VersionId, expectedVersionId);
+            expect(data.VersionId).toBe(expectedVersionId);
         }
         if (expectedTagCount && expectedTagCount === '0') {
-            assert.strictEqual(data.TagCount, undefined);
+            expect(data.TagCount).toBe(undefined);
         } else if (expectedTagCount) {
-            assert.strictEqual(data.TagCount, expectedTagCount);
+            expect(data.TagCount).toBe(expectedTagCount);
         }
         return cb();
     });

@@ -314,11 +307,10 @@ utils.getAwsRetry = (params, retryNumber, assertCb) => {

 utils.awsGetLatestVerId = (key, body, cb) =>
     utils.getAwsRetry({ key }, 0, (err, result) => {
-        assert.strictEqual(err, null, 'Expected success ' +
-            `getting object from AWS, got error ${err}`);
+        expect(err).toBe(null);
         const resultMD5 = utils.expectedETag(result.Body, false);
         const expectedMD5 = utils.expectedETag(body, false);
-        assert.strictEqual(resultMD5, expectedMD5, 'expected different body');
+        expect(resultMD5).toBe(expectedMD5);
         return cb(null, result.VersionId);
     });

@@ -346,11 +338,11 @@ utils.tagging.putTaggingAndAssert = (s3, params, cb) => {
         if (expectedError) {
             return cb();
         }
-        assert.strictEqual(err, null, `got error for putting tags: ${err}`);
+        expect(err).toBe(null);
         if (expectedVersionId) {
-            assert.strictEqual(data.VersionId, expectedVersionId);
+            expect(data.VersionId).toBe(expectedVersionId);
         } else {
-            assert.strictEqual(data.VersionId, undefined);
+            expect(data.VersionId).toBe(undefined);
         }
         return cb(null, data.VersionId);
     });

@@ -367,11 +359,11 @@ utils.tagging.getTaggingAndAssert = (s3, params, cb) => {
         }
         const expectedTagResult = _getTaggingConfig(expectedTags);
         const expectedTagCount = `${Object.keys(expectedTags).length}`;
-        assert.strictEqual(err, null, `got error for putting tags: ${err}`);
+        expect(err).toBe(null);
         if (expectedVersionId) {
-            assert.strictEqual(data.VersionId, expectedVersionId);
+            expect(data.VersionId).toBe(expectedVersionId);
         } else {
-            assert.strictEqual(data.VersionId, undefined);
+            expect(data.VersionId).toBe(undefined);
         }
         assert.deepStrictEqual(data.TagSet, expectedTagResult.TagSet);
         if (getObject === false) {

@@ -391,11 +383,11 @@ utils.tagging.delTaggingAndAssert = (s3, params, cb) => {
         if (expectedError) {
             return cb();
         }
-        assert.strictEqual(err, null, `got error for putting tags: ${err}`);
+        expect(err).toBe(null);
         if (expectedVersionId) {
-            assert.strictEqual(data.VersionId, expectedVersionId);
+            expect(data.VersionId).toBe(expectedVersionId);
         } else {
-            assert.strictEqual(data.VersionId, undefined);
+            expect(data.VersionId).toBe(undefined);
         }
         return utils.tagging.getTaggingAndAssert(s3, { bucket, key, versionId,
             expectedVersionId, expectedTags: {} }, () => cb());

@@ -407,8 +399,7 @@ utils.tagging.awsGetAssertTags = (params, cb) => {
     const expectedTagResult = _getTaggingConfig(expectedTags);
     awsS3.getObjectTagging({ Bucket: awsBucket, Key: key,
         VersionId: versionId }, (err, data) => {
-        assert.strictEqual(err, null, 'got unexpected error getting ' +
-            `tags directly from AWS: ${err}`);
+        expect(err).toBe(null);
         assert.deepStrictEqual(data.TagSet, expectedTagResult.TagSet);
         return cb();
     });
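The one real loss in these helper rewrites is diagnostic text: `assert.strictEqual(err, null, 'Expected success getting object, got error ...')` said which call failed, while `expect(err).toBe(null)` only reports that an error object is not null. A hedged sketch of one way to keep that context under Jest; `failOnErr` is a hypothetical helper, not part of this changeset:

    // Hypothetical helper: preserve the old descriptive failure text.
    function failOnErr(err, desc) {
        if (err) {
            // the thrown message appears in Jest's failure output
            throw new Error(`got error for ${desc}: ${err}`);
        }
    }
    // usage: failOnErr(err, 'getting object from AWS');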
@@ -48,7 +48,7 @@ class ContinueRequestHandler {
         const options = this.getRequestOptions();
         process.env.NODE_TLS_REJECT_UNAUTHORIZED = '0';
         const req = transport.request(options, res => {
-            assert.strictEqual(res.statusCode, statusCode);
+            expect(res.statusCode).toBe(statusCode);
             return cb();
         });
         // Send the body either on the continue event, or immediately.

@@ -64,18 +64,18 @@ class ContinueRequestHandler {
         process.env.NODE_TLS_REJECT_UNAUTHORIZED = '0';
         const req = transport.request(options);
         // At this point we have only sent the header.
-        assert(req.output.length === 1);
+        expect(req.output.length === 1).toBeTruthy();
         const headerLen = req.output[0].length;
         req.on('continue', () => {
             // Has only the header been sent?
-            assert.strictEqual(req.socket.bytesWritten, headerLen);
+            expect(req.socket.bytesWritten).toBe(headerLen);
             // Send the body since the continue event has been emitted.
             return req.end(body);
         });
         req.on('close', () => {
             const expected = body.length + headerLen;
             // Has the entire body been sent?
-            assert.strictEqual(req.socket.bytesWritten, expected);
+            expect(req.socket.bytesWritten).toBe(expected);
             return cb();
         });
         req.on('error', err => cb(err));

@@ -106,25 +106,25 @@ describe('PUT public object with 100-continue header', () => {
         bucketUtil.empty(bucket)
         .then(() => bucketUtil.deleteOne(bucket)));

-    it('should return 200 status code', done =>
+    test('should return 200 status code', done =>
         continueRequest.hasStatusCode(200, done));

-    it('should return 200 status code with upper case value', done =>
+    test('should return 200 status code with upper case value', done =>
         continueRequest.setExpectHeader('100-CONTINUE')
             .hasStatusCode(200, done));

-    it('should return 200 status code if incorrect value', done =>
+    test('should return 200 status code if incorrect value', done =>
         continueRequest.setExpectHeader('101-continue')
             .hasStatusCode(200, done));

-    it('should return 403 status code if cannot authenticate', done =>
+    test('should return 403 status code if cannot authenticate', done =>
         continueRequest.setRequestPath(invalidSignedURL)
             .hasStatusCode(403, done));

-    it('should wait for continue event before sending body', done =>
+    test('should wait for continue event before sending body', done =>
         continueRequest.sendsBodyOnContinue(done));

-    it('should continue if a public user', done =>
+    test('should continue if a public user', done =>
         continueRequest.setRequestPath(invalidSignedURL)
             .sendsBodyOnContinue(done));
 });
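A matcher note on the hunk above: `expect(req.output.length === 1).toBeTruthy()` is a literal translation of `assert(...)`, but on failure it can only report that `false` is not truthy. Jest's dedicated matchers give better output for the same check (a sketch, assuming `req.output` is the array of buffered writes, as in the original assertion):

    expect(req.output).toHaveLength(1);               // failure prints actual vs expected length
    expect(req.socket.bytesWritten).toBe(headerLen);  // unchanged from the diff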
@@ -9,9 +9,9 @@ const key = 'key';
 const bodyFirstPart = Buffer.allocUnsafe(10).fill(0);

 function checkError(err, code, message) {
-    assert.notEqual(err, null, 'Expected failure but got success');
-    assert.strictEqual(err.code, code);
-    assert.strictEqual(err.message, message);
+    expect(err).not.toEqual(null);
+    expect(err.code).toBe(code);
+    expect(err.message).toBe(message);
 }

 describe('Abort MPU', () => {

@@ -44,11 +44,9 @@ describe('Abort MPU', () => {
                 UploadId: uploadId,
             })
             .then(() => bucketUtil.empty(bucket))
-            .then(() => bucketUtil.deleteOne(bucket))
-        );
+            .then(() => bucketUtil.deleteOne(bucket)));

-        it('should return InvalidRequest error if aborting without key',
-            done => {
+        test('should return InvalidRequest error if aborting without key', done => {
             s3.abortMultipartUpload({
                 Bucket: bucket,
                 Key: '',
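`checkError` keeps three separate assertions. Jest can also state the whole expected error shape at once; a hedged alternative sketch, not what this branch does (a null `err` still fails, since `toMatchObject` rejects a null received value):

    function checkError(err, code, message) {
        // checks only the listed properties, ignores the rest of the error
        expect(err).toMatchObject({ code, message });
    }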
@@ -37,10 +37,10 @@ function uploadPart(n, uploadId, s3, next) {
 // NOTE: This test has a history of failing in end-to-end Integration tests.
 // See Integration#449 for more details. A possible cause for its flakiness
 // could be poor system resources.
-describe('large mpu', function tester() {
+describe('large mpu', () => {
     this.timeout(600000);
     let s3;
-    before(done => {
+    beforeAll(done => {
         const config = getConfig('default', { signatureVersion: 'v4' });
         s3 = new S3(config);
         // disable node sdk retries and timeout to prevent InvalidPart

@@ -53,7 +53,7 @@ describe('large mpu', function tester() {
         s3.createBucket({ Bucket: bucket }, done);
     });

-    after(done => {
+    afterAll(done => {
         s3.deleteObject({ Bucket: bucket, Key: key }, err => {
             if (err) {
                 process.stdout.write('err deleting object in after: ', err);

@@ -118,8 +118,7 @@ describe('large mpu', function tester() {
                     if (err) {
                         return next(err);
                     }
-                    assert.strictEqual(data.ETag,
-                        `"${finalETag}-${partCount}"`);
+                    expect(data.ETag).toBe(`"${finalETag}-${partCount}"`);
                     return next();
                 });
             },
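Converting `function tester()` to an arrow function strands `this.timeout(600000)`: that is Mocha's API, and an arrow `describe` has no `this` for it under Jest. The usual Jest equivalent sets the timeout at module scope; a sketch reusing the suite's 600000 ms value:

    jest.setTimeout(600000); // 10 minutes, matching the old this.timeout(600000)

    describe('large mpu', () => {
        // ...same body, without the this.timeout call...
    });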
@@ -12,8 +12,7 @@ const bucket = `completempu${date}`;
 const key = 'key';

 function checkNoError(err) {
-    assert.equal(err, null,
-        `Expected success, got error ${JSON.stringify(err)}`);
+    expect(err).toEqual(null);
 }


@@ -34,9 +33,9 @@ describe('Complete MPU', () => {
                 checkNoError(err);
                 const versionId = data.VersionId;
                 if (expectedVid) {
-                    assert.notEqual(versionId, undefined);
+                    expect(versionId).not.toEqual(undefined);
                 } else {
-                    assert.strictEqual(versionId, expectedVid);
+                    expect(versionId).toBe(expectedVid);
                 }
                 return s3.getObject({
                     Bucket: bucket,

@@ -45,7 +44,7 @@ describe('Complete MPU', () => {
                 (err, data) => {
                     checkNoError(err);
                     if (versionId) {
-                        assert.strictEqual(data.VersionId, versionId);
+                        expect(data.VersionId).toBe(versionId);
                     }
                     cb();
                 });

@@ -92,10 +91,9 @@ describe('Complete MPU', () => {
                 .then(result => {
                     uploadId = result.uploadId;
                     eTag = result.eTag;
-                })
-            );
+                }));

-            it('should complete an MPU with fewer parts than were ' +
+            test('should complete an MPU with fewer parts than were ' +
             'originally put without returning a version id', done => {
                 _completeMpuAndCheckVid(uploadId, eTag, undefined, done);
             });

@@ -111,10 +109,9 @@ describe('Complete MPU', () => {
                 .then(result => {
                     uploadId = result.uploadId;
                     eTag = result.eTag;
-                })
-            );
+                }));

-            it('should complete an MPU with fewer parts than were ' +
+            test('should complete an MPU with fewer parts than were ' +
             'originally put and return a version id', done => {
                 _completeMpuAndCheckVid(uploadId, eTag, true, done);
             });

@@ -130,10 +127,9 @@ describe('Complete MPU', () => {
                 .then(result => {
                     uploadId = result.uploadId;
                     eTag = result.eTag;
-                })
-            );
+                }));

-            it('should complete an MPU with fewer parts than were ' +
+            test('should complete an MPU with fewer parts than were ' +
             'originally put and should not return a version id', done => {
                 _completeMpuAndCheckVid(uploadId, eTag, undefined, done);
             });
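The `}));` rewrites above only collapse a dangling `);` onto the previous line; in both spellings the hook returns the promise chain, which is what makes the runner wait for setup before each test. Shape of the pattern, with the MPU-setup step stubbed out as a hypothetical helper:

    beforeEach(() => createMpuAndPutParts() // hypothetical setup helper
        .then(result => {
            uploadId = result.uploadId; // runs to completion before each test
            eTag = result.eTag;
        }));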
@@ -21,14 +21,14 @@ describe('aws-node-sdk test bucket complete mpu', () => {
     let s3;

     // setup test
-    before(done => {
+    beforeAll(done => {
         const config = getConfig('default', { signatureVersion: 'v4' });
         s3 = new S3(config);
         s3.createBucket({ Bucket: bucket }, done);
     });

     // delete bucket after testing
-    after(done => s3.deleteBucket({ Bucket: bucket }, done));
+    afterAll(done => s3.deleteBucket({ Bucket: bucket }, done));

     const itSkipIfAWS = process.env.AWS_ON_AIR ? it.skip : it;
     itSkipIfAWS('should not accept xml body larger than 1 MB', done => {

@@ -42,9 +42,8 @@ describe('aws-node-sdk test bucket complete mpu', () => {
         };
         s3.completeMultipartUpload(params, error => {
             if (error) {
-                assert.strictEqual(error.statusCode, 400);
-                assert.strictEqual(
-                    error.code, 'InvalidRequest');
+                expect(error.statusCode).toBe(400);
+                expect(error.code).toBe('InvalidRequest');
                 done();
             } else {
                 done('accepted xml body larger than 1 MB');
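`const itSkipIfAWS = process.env.AWS_ON_AIR ? it.skip : it;` survives unchanged because Jest keeps `it` (and `it.skip`) as aliases of `test`. The same guard in this branch's preferred spelling would be, as a sketch:

    const testSkipIfAWS = process.env.AWS_ON_AIR ? test.skip : test;

    testSkipIfAWS('should not accept xml body larger than 1 MB', done => {
        // ...unchanged body...
    });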
@@ -21,13 +21,12 @@ const otherAccountS3 = otherAccountBucketUtility.s3;
 const oneHundredMBPlus11 = 110100481;

 function checkNoError(err) {
-    assert.equal(err, null,
-        `Expected success, got error ${JSON.stringify(err)}`);
+    expect(err).toEqual(null);
 }

 function checkError(err, code) {
-    assert.notEqual(err, null, 'Expected failure but got success');
-    assert.strictEqual(err.code, code);
+    expect(err).not.toEqual(null);
+    expect(err.code).toBe(code);
 }

 describe('Object Part Copy', () => {

@@ -91,11 +90,10 @@ describe('Object Part Copy', () => {
             }
         })
         .then(() => bucketUtil.deleteMany([sourceBucketName,
-            destBucketName]))
-        );
+            destBucketName])));


-        it('should copy a part from a source bucket to a different ' +
+        test('should copy a part from a source bucket to a different ' +
         'destination bucket', done => {
             s3.uploadPartCopy({ Bucket: destBucketName,
                 Key: destObjName,

@@ -105,13 +103,13 @@ describe('Object Part Copy', () => {
             },
                 (err, res) => {
                     checkNoError(err);
-                    assert.strictEqual(res.ETag, etag);
-                    assert(res.LastModified);
+                    expect(res.ETag).toBe(etag);
+                    expect(res.LastModified).toBeTruthy();
                     done();
                 });
         });

-        it('should copy a part from a source bucket to a different ' +
+        test('should copy a part from a source bucket to a different ' +
         'destination bucket and complete the MPU', done => {
             s3.uploadPartCopy({ Bucket: destBucketName,
                 Key: destObjName,

@@ -121,8 +119,8 @@ describe('Object Part Copy', () => {
             },
                 (err, res) => {
                     checkNoError(err);
-                    assert.strictEqual(res.ETag, etag);
-                    assert(res.LastModified);
+                    expect(res.ETag).toBe(etag);
+                    expect(res.LastModified).toBeTruthy();
                     s3.completeMultipartUpload({
                         Bucket: destBucketName,
                         Key: destObjName,

@@ -134,17 +132,16 @@ describe('Object Part Copy', () => {
                         },
                     }, (err, res) => {
                         checkNoError(err);
-                        assert.strictEqual(res.Bucket, destBucketName);
-                        assert.strictEqual(res.Key, destObjName);
+                        expect(res.Bucket).toBe(destBucketName);
+                        expect(res.Key).toBe(destObjName);
                         // AWS confirmed final ETag for MPU
-                        assert.strictEqual(res.ETag,
-                            '"db77ebbae9e9f5a244a26b86193ad818-1"');
+                        expect(res.ETag).toBe('"db77ebbae9e9f5a244a26b86193ad818-1"');
                         done();
                     });
                 });
         });

-        it('should return EntityTooLarge error if attempt to copy ' +
+        test('should return EntityTooLarge error if attempt to copy ' +
         'object larger than max and do not specify smaller ' +
         'range in request', done => {
             s3.putObject({

@@ -166,7 +163,7 @@ describe('Object Part Copy', () => {
             });
         });

-        it('should return EntityTooLarge error if attempt to copy ' +
+        test('should return EntityTooLarge error if attempt to copy ' +
         'object larger than max and specify too large ' +
         'range in request', done => {
             s3.putObject({

@@ -189,7 +186,7 @@ describe('Object Part Copy', () => {
             });
         });

-        it('should succeed if attempt to copy ' +
+        test('should succeed if attempt to copy ' +
         'object larger than max but specify acceptable ' +
         'range in request', done => {
             s3.putObject({

@@ -212,7 +209,7 @@ describe('Object Part Copy', () => {
             });
         });

-        it('should copy a 0 byte object part from a source bucket to a ' +
+        test('should copy a 0 byte object part from a source bucket to a ' +
         'different destination bucket and complete the MPU', done => {
             const emptyFileETag = '"d41d8cd98f00b204e9800998ecf8427e"';
             s3.putObject({

@@ -228,8 +225,8 @@ describe('Object Part Copy', () => {
             },
                 (err, res) => {
                     checkNoError(err);
-                    assert.strictEqual(res.ETag, emptyFileETag);
-                    assert(res.LastModified);
+                    expect(res.ETag).toBe(emptyFileETag);
+                    expect(res.LastModified).toBeTruthy();
                     s3.completeMultipartUpload({
                         Bucket: destBucketName,
                         Key: destObjName,

@@ -241,18 +238,17 @@ describe('Object Part Copy', () => {
                         },
                     }, (err, res) => {
                         checkNoError(err);
-                        assert.strictEqual(res.Bucket, destBucketName);
-                        assert.strictEqual(res.Key, destObjName);
+                        expect(res.Bucket).toBe(destBucketName);
+                        expect(res.Key).toBe(destObjName);
                         // AWS confirmed final ETag for MPU
-                        assert.strictEqual(res.ETag,
-                            '"59adb24ef3cdbe0297f05b395827453f-1"');
+                        expect(res.ETag).toBe('"59adb24ef3cdbe0297f05b395827453f-1"');
                         done();
                     });
                 });
         });

-        it('should copy a part using a range header from a source bucket ' +
+        test('should copy a part using a range header from a source bucket ' +
         'to a different destination bucket and complete the MPU', done => {
             const rangeETag = '"ac1be00f1f162e20d58099eec2ea1c70"';
             // AWS confirmed final ETag for MPU

@@ -266,8 +262,8 @@ describe('Object Part Copy', () => {
             },
                 (err, res) => {
                     checkNoError(err);
-                    assert.strictEqual(res.ETag, rangeETag);
-                    assert(res.LastModified);
+                    expect(res.ETag).toBe(rangeETag);
+                    expect(res.LastModified).toBeTruthy();
                     s3.completeMultipartUpload({
                         Bucket: destBucketName,
                         Key: destObjName,

@@ -279,17 +275,17 @@ describe('Object Part Copy', () => {
                         },
                     }, (err, res) => {
                         checkNoError(err);
-                        assert.strictEqual(res.Bucket, destBucketName);
-                        assert.strictEqual(res.Key, destObjName);
-                        assert.strictEqual(res.ETag, finalMpuETag);
+                        expect(res.Bucket).toBe(destBucketName);
+                        expect(res.Key).toBe(destObjName);
+                        expect(res.ETag).toBe(finalMpuETag);
                         s3.getObject({
                             Bucket: destBucketName,
                             Key: destObjName,
                         }, (err, res) => {
                             checkNoError(err);
-                            assert.strictEqual(res.ETag, finalMpuETag);
-                            assert.strictEqual(res.ContentLength, '4');
-                            assert.strictEqual(res.Body.toString(), 'I am');
+                            expect(res.ETag).toBe(finalMpuETag);
+                            expect(res.ContentLength).toBe('4');
+                            expect(res.Body.toString()).toBe('I am');
                             done();
                         });
                     });

@@ -376,7 +372,7 @@ describe('Object Part Copy', () => {
                 }
             }));

-        it('should copy a part from a source bucket to a different ' +
+        test('should copy a part from a source bucket to a different ' +
         'destination bucket', done => {
             process.stdout.write('Entered first mpu test');
             return s3.uploadPartCopy({ Bucket: destBucketName,

@@ -387,14 +383,13 @@ describe('Object Part Copy', () => {
             },
                 (err, res) => {
                     checkNoError(err);
-                    assert.strictEqual(res.ETag,
-                        totalMpuObjectHash);
-                    assert(res.LastModified);
+                    expect(res.ETag).toBe(totalMpuObjectHash);
+                    expect(res.LastModified).toBeTruthy();
                     done();
                 });
         });

-        it('should copy two parts from a source bucket to a different ' +
+        test('should copy two parts from a source bucket to a different ' +
         'destination bucket and complete the MPU', () => {
             process.stdout.write('Putting first part in MPU test');
             return s3.uploadPartCopyAsync({ Bucket: destBucketName,

@@ -403,8 +398,8 @@ describe('Object Part Copy', () => {
                 PartNumber: 1,
                 UploadId: uploadId,
             }).then(res => {
-                assert.strictEqual(res.ETag, totalMpuObjectHash);
-                assert(res.LastModified);
+                expect(res.ETag).toBe(totalMpuObjectHash);
+                expect(res.LastModified).toBeTruthy();
             }).then(() => {
                 process.stdout.write('Putting second part in MPU test');
                 return s3.uploadPartCopyAsync({ Bucket: destBucketName,

@@ -413,8 +408,8 @@ describe('Object Part Copy', () => {
                 PartNumber: 2,
                 UploadId: uploadId,
             }).then(res => {
-                assert.strictEqual(res.ETag, totalMpuObjectHash);
-                assert(res.LastModified);
+                expect(res.ETag).toBe(totalMpuObjectHash);
+                expect(res.LastModified).toBeTruthy();
             }).then(() => {
                 process.stdout.write('Completing MPU');
                 return s3.completeMultipartUploadAsync({

@@ -429,19 +424,18 @@ describe('Object Part Copy', () => {
                     },
                 });
             }).then(res => {
-                assert.strictEqual(res.Bucket, destBucketName);
-                assert.strictEqual(res.Key, destObjName);
+                expect(res.Bucket).toBe(destBucketName);
+                expect(res.Key).toBe(destObjName);
                 // combined ETag returned by AWS (combination of part ETags
                 // with number of parts at the end)
-                assert.strictEqual(res.ETag,
-                    '"5bba96810ff449d94aa8f5c5a859b0cb-2"');
+                expect(res.ETag).toBe('"5bba96810ff449d94aa8f5c5a859b0cb-2"');
             }).catch(err => {
                 checkNoError(err);
             });
         });
     });

-        it('should copy two parts with range headers from a source ' +
+        test('should copy two parts with range headers from a source ' +
         'bucket to a different destination bucket and ' +
         'complete the MPU', () => {
             process.stdout.write('Putting first part in MPU range test');

@@ -458,8 +452,8 @@ describe('Object Part Copy', () => {
                 UploadId: uploadId,
                 CopySourceRange: 'bytes=5242890-15242880',
             }).then(res => {
-                assert.strictEqual(res.ETag, part1ETag);
-                assert(res.LastModified);
+                expect(res.ETag).toBe(part1ETag);
+                expect(res.LastModified).toBeTruthy();
             }).then(() => {
                 process.stdout.write('Putting second part in MPU test');
                 return s3.uploadPartCopyAsync({ Bucket: destBucketName,

@@ -469,8 +463,8 @@ describe('Object Part Copy', () => {
                 UploadId: uploadId,
                 CopySourceRange: 'bytes=15242891-30242991',
             }).then(res => {
-                assert.strictEqual(res.ETag, part2ETag);
-                assert(res.LastModified);
+                expect(res.ETag).toBe(part2ETag);
+                expect(res.LastModified).toBeTruthy();
             }).then(() => {
                 process.stdout.write('Completing MPU');
                 return s3.completeMultipartUploadAsync({

@@ -485,9 +479,9 @@ describe('Object Part Copy', () => {
                     },
                 });
             }).then(res => {
-                assert.strictEqual(res.Bucket, destBucketName);
-                assert.strictEqual(res.Key, destObjName);
-                assert.strictEqual(res.ETag, finalCombinedETag);
+                expect(res.Bucket).toBe(destBucketName);
+                expect(res.Key).toBe(destObjName);
+                expect(res.ETag).toBe(finalCombinedETag);
             }).then(() => {
                 process.stdout.write('Getting new object');
                 return s3.getObjectAsync({

@@ -495,8 +489,8 @@ describe('Object Part Copy', () => {
                     Key: destObjName,
                 });
             }).then(res => {
-                assert.strictEqual(res.ContentLength, '25000092');
-                assert.strictEqual(res.ETag, finalCombinedETag);
+                expect(res.ContentLength).toBe('25000092');
+                expect(res.ETag).toBe(finalCombinedETag);
             })
             .catch(err => {
                 checkNoError(err);

@@ -504,7 +498,7 @@ describe('Object Part Copy', () => {
             });
         });

-        it('should overwrite an existing part by copying a part', () => {
+        test('should overwrite an existing part by copying a part', () => {
             // AWS response etag for this completed MPU
             const finalObjETag = '"db77ebbae9e9f5a244a26b86193ad818-1"';
             process.stdout.write('Putting first part in MPU test');

@@ -514,8 +508,8 @@ describe('Object Part Copy', () => {
                 PartNumber: 1,
                 UploadId: uploadId,
             }).then(res => {
-                assert.strictEqual(res.ETag, totalMpuObjectHash);
-                assert(res.LastModified);
+                expect(res.ETag).toBe(totalMpuObjectHash);
+                expect(res.LastModified).toBeTruthy();
             }).then(() => {
                 process.stdout.write('Overwriting first part in MPU test');
                 return s3.uploadPartCopyAsync({ Bucket: destBucketName,

@@ -524,8 +518,8 @@ describe('Object Part Copy', () => {
                 PartNumber: 1,
                 UploadId: uploadId });
             }).then(res => {
-                assert.strictEqual(res.ETag, etag);
-                assert(res.LastModified);
+                expect(res.ETag).toBe(etag);
+                expect(res.LastModified).toBeTruthy();
             }).then(() => {
                 process.stdout.write('Completing MPU');
                 return s3.completeMultipartUploadAsync({

@@ -539,9 +533,9 @@ describe('Object Part Copy', () => {
                     },
                 });
             }).then(res => {
-                assert.strictEqual(res.Bucket, destBucketName);
-                assert.strictEqual(res.Key, destObjName);
-                assert.strictEqual(res.ETag, finalObjETag);
+                expect(res.Bucket).toBe(destBucketName);
+                expect(res.Key).toBe(destObjName);
+                expect(res.ETag).toBe(finalObjETag);
             }).then(() => {
                 process.stdout.write('Getting object put by MPU with ' +
                 'overwrite part');

@@ -550,15 +544,14 @@ describe('Object Part Copy', () => {
                     Key: destObjName,
                 });
             }).then(res => {
-                assert.strictEqual(res.ETag, finalObjETag);
+                expect(res.ETag).toBe(finalObjETag);
             }).catch(err => {
                 checkNoError(err);
             });
         });
     });

-        it('should return an error if no such upload initiated',
-            done => {
+        test('should return an error if no such upload initiated', done => {
             s3.uploadPartCopy({ Bucket: destBucketName, Key: destObjName,
                 CopySource: `${sourceBucketName}/${sourceObjName}`,
                 PartNumber: 1,

@@ -570,7 +563,8 @@ describe('Object Part Copy', () => {
             });
         });

-        it('should return an error if attempt to copy from nonexistent bucket',
+        test(
+            'should return an error if attempt to copy from nonexistent bucket',
             done => {
             s3.uploadPartCopy({ Bucket: destBucketName, Key: destObjName,
                 CopySource: `nobucket453234/${sourceObjName}`,

@@ -581,9 +575,11 @@ describe('Object Part Copy', () => {
                 checkError(err, 'NoSuchBucket');
                 done();
             });
-        });
+        }
+        );

-        it('should return an error if attempt to copy to nonexistent bucket',
+        test(
+            'should return an error if attempt to copy to nonexistent bucket',
             done => {
             s3.uploadPartCopy({ Bucket: 'nobucket453234', Key: destObjName,
                 CopySource: `${sourceBucketName}/${sourceObjName}`,

@@ -594,10 +590,10 @@ describe('Object Part Copy', () => {
                 checkError(err, 'NoSuchBucket');
                 done();
             });
-        });
+        }
+        );

-        it('should return an error if attempt to copy nonexistent object',
-            done => {
+        test('should return an error if attempt to copy nonexistent object', done => {
             s3.uploadPartCopy({ Bucket: destBucketName, Key: destObjName,
                 CopySource: `${sourceBucketName}/nokey`,
                 PartNumber: 1,

@@ -609,8 +605,7 @@ describe('Object Part Copy', () => {
             });
         });

-        it('should return an error if use invalid part number',
-            done => {
+        test('should return an error if use invalid part number', done => {
             s3.uploadPartCopy({ Bucket: destBucketName, Key: destObjName,
                 CopySource: `${sourceBucketName}/nokey`,
                 PartNumber: 10001,

@@ -663,10 +658,9 @@ describe('Object Part Copy', () => {
                     throw err;
                 }
             }).then(() => otherAccountBucketUtility
-                .deleteOne(otherAccountBucket))
-            );
+                .deleteOne(otherAccountBucket)));

-        it('should not allow an account without read persmission on the ' +
+        test('should not allow an account without read persmission on the ' +
         'source object to copy the object', done => {
             otherAccountS3.uploadPartCopy({ Bucket: otherAccountBucket,
                 Key: otherAccountKey,

@@ -680,7 +674,7 @@ describe('Object Part Copy', () => {
             });
         });

-        it('should not allow an account without write persmission on the ' +
+        test('should not allow an account without write persmission on the ' +
         'destination bucket to upload part copy the object', done => {
             otherAccountS3.putObject({ Bucket: otherAccountBucket,
                 Key: otherAccountKey, Body: '' }, () => {

@@ -697,7 +691,7 @@ describe('Object Part Copy', () => {
             });
         });

-        it('should allow an account with read permission on the ' +
+        test('should allow an account with read permission on the ' +
         'source object and write permission on the destination ' +
         'bucket to upload part copy the object', done => {
             s3.putObjectAcl({ Bucket: sourceBucketName,
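For the promise-style tests in this file, the `return` in front of `s3.uploadPartCopyAsync(...)` is what lets Jest wait on the chain, since these `test(..., () => { ... })` callbacks take no `done`. One caution that predates the migration: the trailing `.catch(err => { checkNoError(err); })` only fails the test because `checkNoError` throws on a non-null error; a catch that swallowed the rejection without asserting would make the test pass vacuously. Minimal shape, as a sketch using this suite's promisified helper and names:

    test('completes the MPU', () =>
        s3.uploadPartCopyAsync(params)       // returning the chain makes Jest await it
            .then(res => {
                expect(res.ETag).toBe(etag); // a rejection or failed matcher fails the test
            }));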
@@ -233,8 +233,7 @@ function _checkHeaders(action, params, origin, expectedHeaders, callback) {
     function _runAssertions(resHeaders, cb) {
         if (expectedHeaders) {
             Object.keys(expectedHeaders).forEach(key => {
-                assert.deepEqual(resHeaders[key], expectedHeaders[key],
-                    `error header: ${key}`);
+                expect(resHeaders[key]).toEqual(expectedHeaders[key]);
             });
         } else {
             // if no headersResponse provided, should not have these headers

@@ -243,8 +242,7 @@ function _checkHeaders(action, params, origin, expectedHeaders, callback) {
             'access-control-allow-methods',
             'access-control-allow-credentials',
             'vary'].forEach(key => {
-                assert.strictEqual(resHeaders[key], undefined,
-                    `Error: ${key} should not have value`);
+                expect(resHeaders[key]).toBe(undefined);
             });
         }
         cb();

@@ -297,7 +295,7 @@ describe('Cross Origin Resource Sharing requests', () => {
     });

     describe('on non-existing bucket', () => {
-        it('should not respond to request with CORS headers, even ' +
+        test('should not respond to request with CORS headers, even ' +
         'if request was sent with Origin header', done => {
             _checkHeaders(s3.listObjects, { Bucket: 'nonexistingbucket' },
                 allowedOrigin, null, done);

@@ -305,7 +303,7 @@ describe('Cross Origin Resource Sharing requests', () => {
     });

     describe('on bucket without CORS configuration', () => {
-        it('should not respond to request with CORS headers, even ' +
+        test('should not respond to request with CORS headers, even ' +
         'if request was sent with Origin header', done => {
             _checkHeaders(s3.listObjects, { Bucket: bucket },
                 allowedOrigin, null, done);

@@ -340,24 +338,24 @@ describe('Cross Origin Resource Sharing requests', () => {
     });

     describe('when request Origin/method match CORS configuration', () => {
-        it('should not respond with CORS headers to GET service (list ' +
+        test('should not respond with CORS headers to GET service (list ' +
         'buckets), even if Origin/method match CORS rule', done => {
             // no bucket specified in this request
             _checkHeaders(s3.listBuckets, {}, allowedOrigin,
                 null, done);
         });

-        it('should not respond with CORS headers after deleting bucket, ' +
+        test('should not respond with CORS headers after deleting bucket, ' +
         'even if Origin/method match CORS rule', done => {
             s3.deleteBucket({ Bucket: bucket }, err => {
-                assert.strictEqual(err, null, `Unexpected err ${err}`);
+                expect(err).toBe(null);
                 _checkHeaders(s3.listObjects, { Bucket: bucket },
                     allowedOrigin, null, done);
             });
         });

         apiMethods.forEach(method => {
-            it(`should respond to ${method.description} with CORS ` +
+            test(`should respond to ${method.description} with CORS ` +
             'headers (access-control-allow-origin, access-control-allow-' +
             'methods, access-control-allow-credentials and vary)', done => {
                 _checkHeaders(method.action, method.params, allowedOrigin,

@@ -368,7 +366,7 @@ describe('Cross Origin Resource Sharing requests', () => {

     describe('when request Origin does not match CORS rule', () => {
         apiMethods.forEach(method => {
-            it(`should not respond to ${method.description} with ` +
+            test(`should not respond to ${method.description} with ` +
             'CORS headers', done => {
                 _checkHeaders(method.action, method.params,
                     notAllowedOrigin, null, done);

@@ -392,7 +390,7 @@ describe('Cross Origin Resource Sharing requests', () => {
             s3.deleteBucketCors({ Bucket: bucket }, done);
         });

-        it('when request method does not match CORS rule ' +
+        test('when request method does not match CORS rule ' +
         'should not respond with CORS headers', done => {
             _checkHeaders(s3.listObjects, { Bucket: bucket },
                 allowedOrigin, null, done);

@@ -433,16 +431,13 @@ describe('Cross Origin Resource Sharing requests', () => {
                 ACL: 'public-read',
             }, next),
         ], err => {
-            assert.strictEqual(err, null,
-                `Unexpected err ${err} in beforeEach`);
+            expect(err).toBe(null);
             done(err);
-        })
-    );
+        }));

     afterEach(done =>
         s3.deleteObject({ Bucket: bucket, Key: 'index.html' }, err => {
-            assert.strictEqual(err, null,
-                `Unexpected err ${err} in afterEach`);
+            expect(err).toBe(null);
             s3.deleteBucket({ Bucket: bucket }, err => {
                 if (err) {
                     process.stdout.write(`Error in afterEach ${err}`);

@@ -450,34 +445,29 @@ describe('Cross Origin Resource Sharing requests', () => {
                 }
                 return _waitForAWS(done);
             });
-        })
-    );
+        }));

-    it('should respond with CORS headers at website endpoint (GET)',
-        done => {
+    test('should respond with CORS headers at website endpoint (GET)', done => {
         const headers = { Origin: allowedOrigin };
         methodRequest({ method: 'GET', bucket, headers, headersResponse,
             code: 200, isWebsite: true }, done);
|
code: 200, isWebsite: true }, done);
|
||||||
});
|
});
|
||||||
|
|
||||||
it('should respond with CORS headers at website endpoint (GET) ' +
|
test('should respond with CORS headers at website endpoint (GET) ' +
|
||||||
'even in case of error',
|
'even in case of error', done => {
|
||||||
done => {
|
|
||||||
const headers = { Origin: allowedOrigin };
|
const headers = { Origin: allowedOrigin };
|
||||||
methodRequest({ method: 'GET', bucket, objectKey: 'test',
|
methodRequest({ method: 'GET', bucket, objectKey: 'test',
|
||||||
headers, headersResponse, code: 404, isWebsite: true }, done);
|
headers, headersResponse, code: 404, isWebsite: true }, done);
|
||||||
});
|
});
|
||||||
|
|
||||||
it('should respond with CORS headers at website endpoint (GET) ' +
|
test('should respond with CORS headers at website endpoint (GET) ' +
|
||||||
'even in case of redirect',
|
'even in case of redirect', done => {
|
||||||
done => {
|
|
||||||
const headers = { Origin: allowedOrigin };
|
const headers = { Origin: allowedOrigin };
|
||||||
methodRequest({ method: 'GET', bucket, objectKey: 'redirect',
|
methodRequest({ method: 'GET', bucket, objectKey: 'redirect',
|
||||||
headers, headersResponse, code: 301, isWebsite: true }, done);
|
headers, headersResponse, code: 301, isWebsite: true }, done);
|
||||||
});
|
});
|
||||||
|
|
||||||
it('should respond with CORS headers at website endpoint (HEAD)',
|
test('should respond with CORS headers at website endpoint (HEAD)', done => {
|
||||||
done => {
|
|
||||||
const headers = { Origin: allowedOrigin };
|
const headers = { Origin: allowedOrigin };
|
||||||
methodRequest({ method: 'HEAD', bucket, headers, headersResponse,
|
methodRequest({ method: 'HEAD', bucket, headers, headersResponse,
|
||||||
code: 200, isWebsite: true }, done);
|
code: 200, isWebsite: true }, done);
|
||||||
|
@ -506,7 +496,7 @@ describe('Cross Origin Resource Sharing requests', () => {
|
||||||
s3.putBucketCors(corsParams, done);
|
s3.putBucketCors(corsParams, done);
|
||||||
});
|
});
|
||||||
|
|
||||||
it('should not return access-control-allow-headers response ' +
|
test('should not return access-control-allow-headers response ' +
|
||||||
'header even if request matches CORS rule and other access-' +
|
'header even if request matches CORS rule and other access-' +
|
||||||
'control headers are returned', done => {
|
'control headers are returned', done => {
|
||||||
const headers = {
|
const headers = {
|
||||||
|
@ -518,10 +508,9 @@ describe('Cross Origin Resource Sharing requests', () => {
|
||||||
headersOmitted, code: 200 }, done);
|
headersOmitted, code: 200 }, done);
|
||||||
});
|
});
|
||||||
|
|
||||||
it('Request with matching Origin/method but additional headers ' +
|
test('Request with matching Origin/method but additional headers ' +
|
||||||
'that violate CORS rule:\n\t should still respond with access-' +
|
'that violate CORS rule:\n\t should still respond with access-' +
|
||||||
'control headers (headers are only checked in preflight requests)',
|
'control headers (headers are only checked in preflight requests)', done => {
|
||||||
done => {
|
|
||||||
const headers = {
|
const headers = {
|
||||||
Origin: allowedOrigin,
|
Origin: allowedOrigin,
|
||||||
Test: 'test',
|
Test: 'test',
|
||||||
|
@ -553,7 +542,7 @@ describe('Cross Origin Resource Sharing requests', () => {
|
||||||
s3.putBucketCors(corsParams, done);
|
s3.putBucketCors(corsParams, done);
|
||||||
});
|
});
|
||||||
|
|
||||||
it(`should respond with ${elem.header} header ` +
|
test(`should respond with ${elem.header} header ` +
|
||||||
'if request matches CORS rule', done => {
|
'if request matches CORS rule', done => {
|
||||||
const headers = { Origin: allowedOrigin };
|
const headers = { Origin: allowedOrigin };
|
||||||
const headersResponse = {
|
const headersResponse = {
|
||||||
|
|
|
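The conversion pattern in the hunks above is mechanical: mocha's it() becomes jest's test(), and assert.strictEqual(actual, expected, message) becomes expect(actual).toBe(expected), with the custom failure message dropped because jest generates its own. A minimal sketch of the before/after shape, assuming a jest environment (the resHeaders object and test title are illustrative, not taken from the repository):

// before (mocha + node assert):
// it('should not expose CORS headers', done => {
//     assert.strictEqual(resHeaders.vary, undefined,
//         `Error: vary should not have value`);
//     done();
// });

// after (jest):
test('should not expose CORS headers', done => {
    const resHeaders = {}; // illustrative stand-in for a captured response
    expect(resHeaders.vary).toBe(undefined);
    done();
});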
@@ -29,19 +29,19 @@ function _waitForAWS(callback, err) {
 }

 describe('Preflight CORS request on non-existing bucket', () => {
-it('should respond no such bucket if bucket does not exist', done => {
+test('should respond no such bucket if bucket does not exist', done => {
 const headers = {
 Origin: allowedOrigin,
 };
 methodRequest({ method: 'GET', bucket, headers, code: 'NoSuchBucket',
 headersResponse: null }, done);
 });
-it('should return BadRequest for OPTIONS request without origin', done => {
+test('should return BadRequest for OPTIONS request without origin', done => {
 const headers = {};
 methodRequest({ method: 'OPTIONS', bucket, headers, code: 'BadRequest',
 headersResponse: null }, done);
 });
-it('should return BadRequest for OPTIONS request without ' +
+test('should return BadRequest for OPTIONS request without ' +
 'Access-Control-Request-Method', done => {
 const headers = {
 Origin: allowedOrigin,
@@ -63,7 +63,7 @@ describe('Preflight CORS request with existing bucket', () => {
 });
 });

-it('should allow GET on bucket without cors configuration even if ' +
+test('should allow GET on bucket without cors configuration even if ' +
 'Origin header sent', done => {
 const headers = {
 Origin: allowedOrigin,
@@ -71,7 +71,7 @@ describe('Preflight CORS request with existing bucket', () => {
 methodRequest({ method: 'GET', bucket, headers, code: 200,
 headersResponse: null }, done);
 });
-it('should allow HEAD on bucket without cors configuration even if ' +
+test('should allow HEAD on bucket without cors configuration even if ' +
 'Origin header sent', done => {
 const headers = {
 Origin: allowedOrigin,
@@ -79,7 +79,7 @@ describe('Preflight CORS request with existing bucket', () => {
 methodRequest({ method: 'HEAD', bucket, headers, code: 200,
 headersResponse: null }, done);
 });
-it('should respond AccessForbidden for OPTIONS request on bucket without ' +
+test('should respond AccessForbidden for OPTIONS request on bucket without ' +
 'CORSConfiguration', done => {
 const headers = {
 'Origin': allowedOrigin,
@@ -115,7 +115,7 @@ describe('Preflight CORS request with existing bucket', () => {
 });

 methods.forEach(method => {
-it('should respond with 200 and access control headers to ' +
+test('should respond with 200 and access control headers to ' +
 'OPTIONS request from allowed origin and allowed method ' +
 `"${method}"`, done => {
 const headers = {
@@ -132,7 +132,7 @@ describe('Preflight CORS request with existing bucket', () => {
 headersResponse }, done);
 });
 });
-it('should respond AccessForbidden to OPTIONS request from ' +
+test('should respond AccessForbidden to OPTIONS request from ' +
 'not allowed origin', done => {
 const headers = {
 'Origin': allowedOrigin,
@@ -143,7 +143,7 @@ describe('Preflight CORS request with existing bucket', () => {
 methodRequest({ method: 'OPTIONS', bucket, headers,
 code: 'AccessForbidden', headersResponse: null }, done);
 });
-it('should respond AccessForbidden to OPTIONS request with ' +
+test('should respond AccessForbidden to OPTIONS request with ' +
 'not allowed Access-Control-Request-Headers', done => {
 const headers = {
 'Origin': 'http://www.forbiddenwebsite.com',
@@ -178,7 +178,7 @@ describe('Preflight CORS request with existing bucket', () => {
 s3.deleteBucketCors({ Bucket: bucket }, done);
 });

-it('should respond with 200 and access control headers to OPTIONS ' +
+test('should respond with 200 and access control headers to OPTIONS ' +
 'request from allowed origin and method "GET"', done => {
 const headers = {
 'Origin': allowedOrigin,
@@ -193,7 +193,7 @@ describe('Preflight CORS request with existing bucket', () => {
 methodRequest({ method: 'OPTIONS', bucket, headers, code: 200,
 headersResponse }, done);
 });
-it('should respond AccessForbidden to OPTIONS request with allowed ' +
+test('should respond AccessForbidden to OPTIONS request with allowed ' +
 'method but not from allowed origin', done => {
 const headers = {
 'Origin': 'http://www.forbiddenwebsite.com',
@@ -202,9 +202,8 @@ describe('Preflight CORS request with existing bucket', () => {
 methodRequest({ method: 'OPTIONS', bucket, headers,
 code: 'AccessForbidden', headersResponse: null }, done);
 });
-it('should respond AccessForbidden to OPTIONS request from allowed ' +
-'origin and method but with not allowed Access-Control-Request-Headers',
-done => {
+test('should respond AccessForbidden to OPTIONS request from allowed ' +
+'origin and method but with not allowed Access-Control-Request-Headers', done => {
 const headers = {
 'Origin': allowedOrigin,
 'Access-Control-Request-Method': 'GET',
@@ -215,7 +214,7 @@ describe('Preflight CORS request with existing bucket', () => {
 code: 'AccessForbidden', headersResponse: null }, done);
 });
 ['PUT', 'POST', 'DELETE'].forEach(method => {
-it('should respond AccessForbidden to OPTIONS request from ' +
+test('should respond AccessForbidden to OPTIONS request from ' +
 `allowed origin but not allowed method "${method}"`, done => {
 const headers = {
 'Origin': allowedOrigin,
@@ -249,9 +248,8 @@ describe('Preflight CORS request with existing bucket', () => {
 s3.deleteBucketCors({ Bucket: bucket }, done);
 });

-it('should respond with 200 and access control headers to ' +
-`OPTIONS request from allowed origin and method "${allowedMethod}"`,
-done => {
+test('should respond with 200 and access control headers to ' +
+`OPTIONS request from allowed origin and method "${allowedMethod}"`, done => {
 const headers = {
 'Origin': allowedOrigin,
 'Access-Control-Request-Method': allowedMethod,
@@ -264,7 +262,7 @@ describe('Preflight CORS request with existing bucket', () => {
 methodRequest({ method: 'OPTIONS', bucket, headers, code: 200,
 headersResponse }, done);
 });
-it('should respond AccessForbidden to OPTIONS request from ' +
+test('should respond AccessForbidden to OPTIONS request from ' +
 'allowed origin and method but with not allowed Access-Control-' +
 'Request-Headers', done => {
 const headers = {
@@ -278,7 +276,7 @@ describe('Preflight CORS request with existing bucket', () => {
 });
 methods.filter(method => method !== allowedMethod)
 .forEach(method => {
-it('should respond AccessForbidden to OPTIONS request from ' +
+test('should respond AccessForbidden to OPTIONS request from ' +
 `allowed origin but not allowed method "${method}"`, done => {
 const headers = {
 'Origin': allowedOrigin,
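A second recurring edit in these hunks is pure layout: where the mocha version placed the done parameter on its own line after a multi-line concatenated title, the jest version folds done => { onto the last title fragment. The call signature, test(name, fn), is the same in both frameworks, so behavior is unchanged. A minimal sketch with an illustrative title and empty body:

// before:
// it('should respond AccessForbidden to OPTIONS request from ' +
//     'a not allowed origin',
//     done => { done(); });

// after:
test('should respond AccessForbidden to OPTIONS request from ' +
    'a not allowed origin', done => {
    done();
});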
@@ -317,9 +315,8 @@ describe('Preflight CORS request with existing bucket', () => {

 [originWithoutWildcard, originReplaceWildcard]
 .forEach(acceptableOrigin => {
-it('should return 200 and CORS header to OPTIONS request ' +
-`from allowed method and origin "${acceptableOrigin}"`,
-done => {
+test('should return 200 and CORS header to OPTIONS request ' +
+`from allowed method and origin "${acceptableOrigin}"`, done => {
 const headers = {
 'Origin': acceptableOrigin,
 'Access-Control-Request-Method': 'GET',
@@ -335,9 +332,8 @@ describe('Preflight CORS request with existing bucket', () => {
 });
 });
 if (!origin.endsWith('*')) {
-it('should respond AccessForbidden to OPTIONS request from ' +
-`allowed method and origin "${originWithoutWildcard}test"`,
-done => {
+test('should respond AccessForbidden to OPTIONS request from ' +
+`allowed method and origin "${originWithoutWildcard}test"`, done => {
 const headers = {
 'Origin': `${originWithoutWildcard}test`,
 'Access-Control-Request-Method': 'GET',
@@ -347,9 +343,8 @@ describe('Preflight CORS request with existing bucket', () => {
 });
 }
 if (!origin.startsWith('*')) {
-it('should respond AccessForbidden to OPTIONS request from ' +
-`allowed method and origin "test${originWithoutWildcard}"`,
-done => {
+test('should respond AccessForbidden to OPTIONS request from ' +
+`allowed method and origin "test${originWithoutWildcard}"`, done => {
 const headers = {
 'Origin': `test${originWithoutWildcard}`,
 'Access-Control-Request-Method': 'GET',
@@ -397,7 +392,7 @@ describe('Preflight CORS request with existing bucket', () => {
 s3.deleteBucketCors({ Bucket: bucket }, done);
 });

-it('if OPTIONS request matches rule with multiple origins, response ' +
+test('if OPTIONS request matches rule with multiple origins, response ' +
 'access-control-request-origin header value should be request Origin ' +
 '(not list of AllowedOrigins)', done => {
 const headers = {
@@ -413,7 +408,7 @@ describe('Preflight CORS request with existing bucket', () => {
 methodRequest({ method: 'OPTIONS', bucket, headers, code: 200,
 headersResponse }, done);
 });
-it('if OPTIONS request matches rule with origin containing wildcard, ' +
+test('if OPTIONS request matches rule with origin containing wildcard, ' +
 'response access-control-request-origin header value should be ' +
 'request Origin (not value containing wildcard)', done => {
 const requestOrigin = originContainingWildcard.replace('*', 'test');
@@ -430,7 +425,7 @@ describe('Preflight CORS request with existing bucket', () => {
 methodRequest({ method: 'OPTIONS', bucket, headers, code: 200,
 headersResponse }, done);
 });
-it('if OPTIONS request matches rule that allows all origins, ' +
+test('if OPTIONS request matches rule that allows all origins, ' +
 'e.g. "*", response access-control-request-origin header should ' +
 'return "*"', done => {
 const headers = {
@@ -475,7 +470,7 @@ describe('Preflight CORS request with existing bucket', () => {
 s3.deleteBucketCors({ Bucket: bucket }, done);
 });

-it('should respond with 200 and access control headers to OPTIONS ' +
+test('should respond with 200 and access control headers to OPTIONS ' +
 'request from allowed origin and method, even without request ' +
 'Access-Control-Request-Headers header value', done => {
 const headers = {
@@ -490,7 +485,7 @@ describe('Preflight CORS request with existing bucket', () => {
 methodRequest({ method: 'OPTIONS', bucket, headers, code: 200,
 headersResponse }, done);
 });
-it('should respond with 200 and access control headers to OPTIONS ' +
+test('should respond with 200 and access control headers to OPTIONS ' +
 'request from allowed origin and method with Access-Control-' +
 'Request-Headers \'Content-Type\'', done => {
 const headers = {
@@ -507,10 +502,9 @@ describe('Preflight CORS request with existing bucket', () => {
 methodRequest({ method: 'OPTIONS', bucket, headers, code: 200,
 headersResponse }, done);
 });
-it('should respond AccessForbidden to OPTIONS request from allowed ' +
+test('should respond AccessForbidden to OPTIONS request from allowed ' +
 'origin and method but not allowed Access-Control-Request-Headers ' +
-'in addition to Content-Type',
-done => {
+'in addition to Content-Type', done => {
 const headers = {
 'Origin': allowedOrigin,
 'Access-Control-Request-Method': 'GET',
@@ -561,10 +555,9 @@ describe('Preflight CORS request with existing bucket', () => {
 s3.deleteBucketCors({ Bucket: bucket }, done);
 });

-it('should return request access-control-request-headers value, ' +
+test('should return request access-control-request-headers value, ' +
 'not list of AllowedHeaders from rule or corresponding AllowedHeader ' +
-'value containing wildcard',
-done => {
+'value containing wildcard', done => {
 const requestHeaderValue = 'amz-meta-header-test, content-type';
 const headers = {
 'Origin': allowedOrigin,
@@ -580,9 +573,8 @@ describe('Preflight CORS request with existing bucket', () => {
 methodRequest({ method: 'OPTIONS', bucket, headers, code: 200,
 headersResponse }, done);
 });
-it('should return lowercase version of request Access-Control-' +
-'Request-Method header value if it contains any upper-case values',
-done => {
+test('should return lowercase version of request Access-Control-' +
+'Request-Method header value if it contains any upper-case values', done => {
 const requestHeaderValue = 'Content-Type';
 const headers = {
 'Origin': allowedOrigin,
@@ -599,10 +591,9 @@ describe('Preflight CORS request with existing bucket', () => {
 methodRequest({ method: 'OPTIONS', bucket, headers, code: 200,
 headersResponse }, done);
 });
-it('should remove empty comma-separated values derived from request ' +
+test('should remove empty comma-separated values derived from request ' +
 'Access-Control-Request-Method header and separate values with ' +
-'spaces when responding with Access-Control-Allow-Headers header',
-done => {
+'spaces when responding with Access-Control-Allow-Headers header', done => {
 const requestHeaderValue = 'content-type,,expires';
 const expectedValue = 'content-type, expires';
 const headers = {
@@ -619,7 +610,7 @@ describe('Preflight CORS request with existing bucket', () => {
 methodRequest({ method: 'OPTIONS', bucket, headers, code: 200,
 headersResponse }, done);
 });
-it('should return request Access-Control-Request-Headers value ' +
+test('should return request Access-Control-Request-Headers value ' +
 'even if rule allows all headers (e.g. "*"), unlike access-control-' +
 'allow-origin value', done => {
 const requestHeaderValue = 'puppies';
@@ -676,9 +667,8 @@ describe('Preflight CORS request with existing bucket', () => {
 });
 });

-it('should respond with 200 and access control headers to OPTIONS ' +
-'request from allowed origin, allowed method and existing object key',
-done => {
+test('should respond with 200 and access control headers to OPTIONS ' +
+'request from allowed origin, allowed method and existing object key', done => {
 const headers = {
 'Origin': allowedOrigin,
 'Access-Control-Request-Method': 'GET',
@@ -692,7 +682,7 @@ describe('Preflight CORS request with existing bucket', () => {
 methodRequest({ method: 'OPTIONS', objectKey, bucket, headers,
 code: 200, headersResponse }, done);
 });
-it('should respond with 200 and access control headers to OPTIONS ' +
+test('should respond with 200 and access control headers to OPTIONS ' +
 'request from allowed origin, allowed method, even with non-existing ' +
 'object key', done => {
 const headers = {
@@ -730,7 +720,7 @@ describe('Preflight CORS request with existing bucket', () => {
 s3.deleteBucketCors({ Bucket: bucket }, done);
 });

-it('with fake auth credentials: should respond with 200 and access ' +
+test('with fake auth credentials: should respond with 200 and access ' +
 'control headers even if request has fake auth credentials', done => {
 const headers = {
 'Origin': allowedOrigin,
@@ -746,7 +736,7 @@ describe('Preflight CORS request with existing bucket', () => {
 headersResponse }, done);
 });

-it('with cookies: should send identical response as to request ' +
+test('with cookies: should send identical response as to request ' +
 'without cookies (200 and access control headers)', done => {
 const headers = {
 'Origin': allowedOrigin,
@@ -792,9 +782,8 @@ describe('Preflight CORS request with existing bucket', () => {
 s3.deleteBucketCors({ Bucket: bucket }, done);
 });

-it('if OPTIONS request matches CORS rule with ExposeHeader\'s, ' +
-'response should include Access-Control-Expose-Headers header',
-done => {
+test('if OPTIONS request matches CORS rule with ExposeHeader\'s, ' +
+'response should include Access-Control-Expose-Headers header', done => {
 const headers = {
 'Origin': allowedOrigin,
 'Access-Control-Request-Method': 'GET',
@@ -836,7 +825,7 @@ describe('Preflight CORS request with existing bucket', () => {
 s3.deleteBucketCors({ Bucket: bucket }, done);
 });

-it('if OPTIONS request matches CORS rule with max age seconds, ' +
+test('if OPTIONS request matches CORS rule with max age seconds, ' +
 'response should include Access-Control-Max-Age header', done => {
 const headers = {
 'Origin': allowedOrigin,
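All of these preflight cases exercise the same wire-level shape: an OPTIONS request carrying an Origin header and an Access-Control-Request-Method header. The repository's methodRequest helper is not shown in this diff; a hand-rolled equivalent with node's http module would look roughly like the sketch below (the endpoint, port, bucket name and origin are illustrative assumptions):

const http = require('http');

// Hand-rolled CORS preflight request against a local S3-compatible server.
const req = http.request({
    hostname: 'localhost',
    port: 8000,
    path: '/testbucket',
    method: 'OPTIONS',
    headers: {
        Origin: 'http://www.allowedwebsite.com',
        'Access-Control-Request-Method': 'GET',
    },
}, res => {
    // A matching CORS rule yields 200 plus access-control-* response headers.
    console.log(res.statusCode, res.headers['access-control-allow-origin']);
});
req.end();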
@@ -27,25 +27,21 @@ describe('DELETE multipart', () => {
 UploadId: uploadId }, err => {
 const statusCode =
 request.response.httpResponse.statusCode;
-assert.strictEqual(statusCode, statusCodeExpected,
-    `Found unexpected statusCode ${statusCode}`);
+expect(statusCode).toBe(statusCodeExpected);
 if (statusCode === 204) {
-assert.strictEqual(err, null,
-    `Expected no err but found ${err}`);
+expect(err).toBe(null);
 return callback(err);
 }
 return callback();
 });
 }

-it('on bucket that does not exist: should return NoSuchBucket',
-done => {
+test('on bucket that does not exist: should return NoSuchBucket', done => {
 const uploadId = 'nonexistinguploadid';
 s3.abortMultipartUpload({ Bucket: bucket, Key: key,
 UploadId: uploadId }, err => {
-assert.notEqual(err, null,
-    'Expected NoSuchBucket but found no err');
-assert.strictEqual(err.code, 'NoSuchBucket');
+expect(err).not.toEqual(null);
+expect(err.code).toBe('NoSuchBucket');
 done();
 });
 });
@@ -61,8 +57,7 @@ describe('DELETE multipart', () => {
 .catch(err => {
 process.stdout.write(`Error in beforeEach: ${err}\n`);
 throw err;
-})
-);
+}));

 afterEach(() => {
 process.stdout.write('Emptying bucket\n');
@@ -101,10 +96,9 @@ describe('DELETE multipart', () => {
 PartNumber: 1,
 UploadId: uploadId,
 });
-})
-);
+}));

-it('should return 204 for abortMultipartUpload', done => {
+test('should return 204 for abortMultipartUpload', done => {
 _assertStatusCode(uploadId, 204,
 done);
 });
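Where the mocha version paired assert.notEqual(err, null, message) with a follow-up assert.strictEqual(err.code, ...), the jest version expresses the same two checks as matchers, again dropping the hand-written messages. A minimal sketch, with a fabricated error object standing in for a real SDK error:

test('surfaces the expected S3 error code', done => {
    const err = { code: 'NoSuchBucket' }; // illustrative stand-in
    expect(err).not.toEqual(null);
    expect(err.code).toBe('NoSuchBucket');
    done();
});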
@@ -20,9 +20,9 @@ const taggingConfig = { TagSet: [
 ] };

 function _checkError(err, code, statusCode) {
-assert(err, 'Expected error but found none');
-assert.strictEqual(err.code, code);
-assert.strictEqual(err.statusCode, statusCode);
+expect(err).toBeTruthy();
+expect(err.code).toBe(code);
+expect(err.statusCode).toBe(statusCode);
 }

 describe('DELETE object taggings', () => {
@@ -52,7 +52,7 @@ describe('DELETE object taggings', () => {
 });
 });

-it('should delete tag set', done => {
+test('should delete tag set', done => {
 s3.putObjectTagging({
 Bucket: bucketName,
 Key: objectName,
@@ -62,22 +62,23 @@ describe('DELETE object taggings', () => {
 s3.deleteObjectTagging({ Bucket: bucketName, Key: objectName },
 (err, data) => {
 assert.ifError(err, `Found unexpected err ${err}`);
-assert.strictEqual(Object.keys(data).length, 0);
+expect(Object.keys(data).length).toBe(0);
 done();
 });
 });
 });

-it('should delete a non-existing tag set', done => {
+test('should delete a non-existing tag set', done => {
 s3.deleteObjectTagging({ Bucket: bucketName, Key: objectName },
 (err, data) => {
 assert.ifError(err, `Found unexpected err ${err}`);
-assert.strictEqual(Object.keys(data).length, 0);
+expect(Object.keys(data).length).toBe(0);
 done();
 });
 });

-it('should return NoSuchKey deleting tag set to a non-existing object',
+test(
+    'should return NoSuchKey deleting tag set to a non-existing object',
 done => {
 s3.deleteObjectTagging({
 Bucket: bucketName,
@@ -86,8 +87,9 @@ describe('DELETE object taggings', () => {
 _checkError(err, 'NoSuchKey', 404);
 done();
 });
-});
-it('should return 403 AccessDenied deleting tag set with another ' +
+}
+);
+test('should return 403 AccessDenied deleting tag set with another ' +
 'account', done => {
 otherAccountS3.deleteObjectTagging({ Bucket: bucketName, Key:
 objectName }, err => {
@@ -96,9 +98,8 @@ describe('DELETE object taggings', () => {
 });
 });

-it('should return 403 AccessDenied deleting tag set with a different ' +
-'account to an object with ACL "public-read-write"',
-done => {
+test('should return 403 AccessDenied deleting tag set with a different ' +
+'account to an object with ACL "public-read-write"', done => {
 s3.putObjectAcl({ Bucket: bucketName, Key: objectName,
 ACL: 'public-read-write' }, err => {
 if (err) {
@@ -112,9 +113,8 @@ describe('DELETE object taggings', () => {
 });
 });

-it('should return 403 AccessDenied deleting tag set to an object' +
-' in a bucket created with a different account',
-done => {
+test('should return 403 AccessDenied deleting tag set to an object' +
+' in a bucket created with a different account', done => {
 async.waterfall([
 next => s3.putBucketAcl({ Bucket: bucketName, ACL:
 'public-read-write' }, err => next(err)),
@@ -128,7 +128,7 @@ describe('DELETE object taggings', () => {
 });
 });

-it('should delete tag set to an object in a bucket created with same ' +
+test('should delete tag set to an object in a bucket created with same ' +
 'account even though object put by other account', done => {
 async.waterfall([
 next => s3.putBucketAcl({ Bucket: bucketName, ACL:
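Assembled from the hunk above, the migrated helper reads as three plain matchers; note that the neighbouring assert.ifError calls are left untouched, since node's assert module keeps working inside jest test bodies:

function _checkError(err, code, statusCode) {
    expect(err).toBeTruthy();
    expect(err.code).toBe(code);
    expect(err.statusCode).toBe(statusCode);
}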
@@ -13,7 +13,7 @@ describe('DELETE object', () => {
 const s3 = bucketUtil.s3;
 const testfile = Buffer.alloc(1024 * 1024 * 54, 0);

-before(() => {
+beforeAll(() => {
 process.stdout.write('creating bucket\n');
 return s3.createBucketAsync({ Bucket: bucketName })
 .then(() => {
@@ -59,7 +59,7 @@ describe('DELETE object', () => {
 });
 });

-after(() => {
+afterAll(() => {
 process.stdout.write('Emptying bucket\n');
 return bucketUtil.empty(bucketName)
 .then(() => {
@@ -72,10 +72,9 @@ describe('DELETE object', () => {
 });
 });

-it('should delete a object uploaded in parts successfully', done => {
+test('should delete a object uploaded in parts successfully', done => {
 s3.deleteObject({ Bucket: bucketName, Key: objectName }, err => {
-assert.strictEqual(err, null,
-    `Expected success, got error ${JSON.stringify(err)}`);
+expect(err).toBe(null);
 done();
 });
 });
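The other rename visible here is the lifecycle hooks: mocha's before/after become jest's beforeAll/afterAll, while beforeEach/afterEach keep their names in both frameworks. A minimal sketch of the resulting shape (the setup and teardown bodies are illustrative; returning a promise makes jest wait for it):

describe('DELETE object (sketch)', () => {
    beforeAll(() => Promise.resolve()); // one-time setup
    afterAll(() => Promise.resolve());  // one-time teardown
    test('runs between the hooks', () => {
        expect(1 + 1).toBe(2);
    });
});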
@ -22,17 +22,16 @@ const etag = `"${etagTrim}"`;
|
||||||
const partSize = 1024 * 1024 * 5; // 5MB minumum required part size.
|
const partSize = 1024 * 1024 * 5; // 5MB minumum required part size.
|
||||||
|
|
||||||
function checkNoError(err) {
|
function checkNoError(err) {
|
||||||
assert.equal(err, null,
|
expect(err).toEqual(null);
|
||||||
`Expected success, got error ${JSON.stringify(err)}`);
|
|
||||||
}
|
}
|
||||||
|
|
||||||
function checkError(err, code) {
|
function checkError(err, code) {
|
||||||
assert.notEqual(err, null, 'Expected failure but got success');
|
expect(err).not.toEqual(null);
|
||||||
assert.strictEqual(err.code, code);
|
expect(err.code).toBe(code);
|
||||||
}
|
}
|
||||||
|
|
||||||
function checkContentLength(contentLengthHeader, expectedSize) {
|
function checkContentLength(contentLengthHeader, expectedSize) {
|
||||||
assert.strictEqual(Number.parseInt(contentLengthHeader, 10), expectedSize);
|
expect(Number.parseInt(contentLengthHeader, 10)).toBe(expectedSize);
|
||||||
}
|
}
|
||||||
|
|
||||||
function dateFromNow(diff) {
|
function dateFromNow(diff) {
|
||||||
|
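The helper conversion above consistently maps assert.strictEqual to the identity matcher toBe (Object.is semantics) and the looser assert.equal/assert.notEqual to toEqual, which compares structurally. For primitives and null the two matchers agree; the difference only shows up for objects, as this self-contained illustration shows:

test('toBe vs toEqual', () => {
    expect(null).toBe(null);              // identity, like assert.strictEqual
    expect(null).toEqual(null);           // structural, like assert.equal
    expect({ a: 1 }).toEqual({ a: 1 });   // same structure: passes
    expect({ a: 1 }).not.toBe({ a: 1 });  // different references: passes
});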
@@ -68,10 +67,7 @@ describe('GET object', () => {
 checkContentLength(data.ContentLength, len);
 const md5Hash = crypto.createHash('md5');
 const md5HashExpected = crypto.createHash('md5');
-assert.strictEqual(
-    md5Hash.update(data.Body).digest('hex'),
-    md5HashExpected.update(body).digest('hex')
-);
+expect(md5Hash.update(data.Body).digest('hex')).toBe(md5HashExpected.update(body).digest('hex'));
 return cb();
 });
 }
@@ -186,14 +182,14 @@ describe('GET object', () => {
 });
 }

-before(done => {
+beforeAll(done => {
 bucketUtil = new BucketUtility('default', sigCfg);
 s3 = bucketUtil.s3;
 // Create a bucket to put object to get later
 s3.createBucket({ Bucket: bucketName }, done);
 });

-after(done => {
+afterAll(done => {
 s3.deleteObject({ Bucket: bucketName, Key: objectName }, err => {
 if (err) {
 return done(err);
@@ -203,22 +199,21 @@ describe('GET object', () => {
 });


-it('should return an error to get request without a valid bucket name',
+test(
+    'should return an error to get request without a valid bucket name',
 done => {
 s3.getObject({ Bucket: '', Key: 'somekey' }, err => {
-assert.notEqual(err, null,
-    'Expected failure but got success');
-assert.strictEqual(err.code, 'MethodNotAllowed');
+expect(err).not.toEqual(null);
+expect(err.code).toBe('MethodNotAllowed');
 return done();
 });
-});
+}
+);

-it('should return NoSuchKey error when no such object',
-done => {
+test('should return NoSuchKey error when no such object', done => {
 s3.getObject({ Bucket: bucketName, Key: 'nope' }, err => {
-assert.notEqual(err, null,
-    'Expected failure but got success');
-assert.strictEqual(err.code, 'NoSuchKey');
+expect(err).not.toEqual(null);
+expect(err.code).toBe('NoSuchKey');
 return done();
 });
 });
@@ -226,7 +221,7 @@ describe('GET object', () => {
 describe('Additional headers: [Cache-Control, Content-Disposition, ' +
 'Content-Encoding, Expires, Accept-Ranges]', () => {
 describe('if specified in put object request', () => {
-before(done => {
+beforeAll(done => {
 const params = {
 Bucket: bucketName,
 Key: objectName,
@@ -238,36 +233,32 @@ describe('GET object', () => {
 };
 s3.putObject(params, err => done(err));
 });
-it('should return additional headers', done => {
+test('should return additional headers', done => {
 s3.getObject({ Bucket: bucketName, Key: objectName },
 (err, res) => {
 if (err) {
 return done(err);
 }
-assert.strictEqual(res.CacheControl,
-    cacheControl);
-assert.strictEqual(res.ContentDisposition,
-    contentDisposition);
+expect(res.CacheControl).toBe(cacheControl);
+expect(res.ContentDisposition).toBe(contentDisposition);
 // Should remove V4 streaming value 'aws-chunked'
 // to be compatible with AWS behavior
-assert.strictEqual(res.ContentEncoding,
-    'gzip');
-assert.strictEqual(res.ContentType, contentType);
-assert.strictEqual(res.Expires,
-    new Date(expires).toGMTString());
-assert.strictEqual(res.AcceptRanges, 'bytes');
+expect(res.ContentEncoding).toBe('gzip');
+expect(res.ContentType).toBe(contentType);
+expect(res.Expires).toBe(new Date(expires).toGMTString());
+expect(res.AcceptRanges).toBe('bytes');
 return done();
 });
 });
 });

 describe('if response content headers are set in query', () => {
-before(done => {
+beforeAll(done => {
 s3.putObject({ Bucket: bucketName, Key: objectName },
 err => done(err));
 });

-it('should return additional headers even if not set in ' +
+test('should return additional headers even if not set in ' +
 'put object request', done => {
 const params = {
 Bucket: bucketName,
@@ -283,16 +274,12 @@ describe('GET object', () => {
 if (err) {
 return done(err);
 }
-assert.strictEqual(res.CacheControl,
-    cacheControl);
-assert.strictEqual(res.ContentDisposition,
-    contentDisposition);
-assert.strictEqual(res.ContentEncoding,
-    contentEncoding);
-assert.strictEqual(res.ContentLanguage,
-    contentLanguage);
-assert.strictEqual(res.ContentType, contentType);
-assert.strictEqual(res.Expires, expires);
+expect(res.CacheControl).toBe(cacheControl);
+expect(res.ContentDisposition).toBe(contentDisposition);
+expect(res.ContentEncoding).toBe(contentEncoding);
+expect(res.ContentLanguage).toBe(contentLanguage);
+expect(res.ContentType).toBe(contentType);
+expect(res.Expires).toBe(expires);
 return done();
 });
 });
@@ -300,7 +287,7 @@ describe('GET object', () => {
 });

 describe('x-amz-website-redirect-location header', () => {
-before(done => {
+beforeAll(done => {
 const params = {
 Bucket: bucketName,
 Key: objectName,
@@ -308,14 +295,14 @@ describe('GET object', () => {
 };
 s3.putObject(params, err => done(err));
 });
-it('should return website redirect header if specified in ' +
+test('should return website redirect header if specified in ' +
 'objectPUT request', done => {
 s3.getObject({ Bucket: bucketName, Key: objectName },
 (err, res) => {
 if (err) {
 return done(err);
 }
-assert.strictEqual(res.WebsiteRedirectLocation, '/');
+expect(res.WebsiteRedirectLocation).toBe('/');
 return done();
 });
 });
@@ -342,14 +329,13 @@ describe('GET object', () => {
 s3.putObject(params, done);
 });

-it('should not return "x-amz-tagging-count" if no tag ' +
-'associated with the object',
-done => {
+test('should not return "x-amz-tagging-count" if no tag ' +
+'associated with the object', done => {
 s3.getObject(params, (err, data) => {
 if (err) {
 return done(err);
 }
-assert.strictEqual(data.TagCount, undefined);
+expect(data.TagCount).toBe(undefined);
 return done();
 });
 });
@@ -358,14 +344,13 @@ describe('GET object', () => {
 beforeEach(done => {
 s3.putObjectTagging(paramsTagging, done);
 });
-it('should return "x-amz-tagging-count" header that provides ' +
-'the count of number of tags associated with the object',
-done => {
+test('should return "x-amz-tagging-count" header that provides ' +
+'the count of number of tags associated with the object', done => {
 s3.getObject(params, (err, data) => {
 if (err) {
 return done(err);
 }
-assert.equal(data.TagCount, 1);
+expect(data.TagCount).toEqual(1);
 return done();
 });
 });
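The If-Match/If-None-Match cases that follow drive S3's conditional GET semantics. The requestGet helper is defined outside this diff; a hand-rolled equivalent with the aws-sdk v2 client would look roughly like this sketch (bucket, key, ETag and endpoint are illustrative assumptions):

const AWS = require('aws-sdk');

const s3 = new AWS.S3({
    endpoint: 'http://localhost:8000',
    s3ForcePathStyle: true,
});

// Conditional GET: succeeds when the stored ETag matches IfMatch,
// fails otherwise.
s3.getObject({
    Bucket: 'testbucket',
    Key: 'testkey',
    IfMatch: '"d41d8cd98f00b204e9800998ecf8427e"',
}, (err, data) => {
    if (err) {
        console.log(err.code); // 'PreconditionFailed' when the ETag differs
        return;
    }
    console.log(data.ETag);
});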
@@ -377,18 +362,16 @@ describe('GET object', () => {
 beforeEach(done => {
 s3.putObject(params, done);
 });
-it('If-Match: returns no error when ETag match, with double ' +
-'quotes around ETag',
-done => {
+test('If-Match: returns no error when ETag match, with double ' +
+'quotes around ETag', done => {
 requestGet({ IfMatch: etag }, err => {
 checkNoError(err);
 done();
 });
 });

-it('If-Match: returns no error when one of ETags match, with ' +
-'double quotes around ETag',
-done => {
+test('If-Match: returns no error when one of ETags match, with ' +
+'double quotes around ETag', done => {
 requestGet({ IfMatch:
 `non-matching,${etag}` }, err => {
 checkNoError(err);
@@ -396,18 +379,16 @@ describe('GET object', () => {
 });
 });

-it('If-Match: returns no error when ETag match, without double ' +
-'quotes around ETag',
-done => {
+test('If-Match: returns no error when ETag match, without double ' +
+'quotes around ETag', done => {
 requestGet({ IfMatch: etagTrim }, err => {
 checkNoError(err);
 done();
 });
 });

-it('If-Match: returns no error when one of ETags match, without ' +
-'double quotes around ETag',
-done => {
+test('If-Match: returns no error when one of ETags match, without ' +
+'double quotes around ETag', done => {
 requestGet({ IfMatch:
 `non-matching,${etagTrim}` }, err => {
 checkNoError(err);
@@ -415,15 +396,14 @@ describe('GET object', () => {
 });
 });

-it('If-Match: returns no error when ETag match with *', done => {
+test('If-Match: returns no error when ETag match with *', done => {
 requestGet({ IfMatch: '*' }, err => {
 checkNoError(err);
 done();
 });
 });

-it('If-Match: returns PreconditionFailed when ETag does not match',
-done => {
+test('If-Match: returns PreconditionFailed when ETag does not match', done => {
 requestGet({
 IfMatch: 'non-matching ETag',
 }, err => {
@@ -432,16 +412,14 @@ describe('GET object', () => {
 });
 });

-it('If-None-Match: returns no error when ETag does not match',
-done => {
+test('If-None-Match: returns no error when ETag does not match', done => {
 requestGet({ IfNoneMatch: 'non-matching' }, err => {
 checkNoError(err);
 done();
 });
 });

-it('If-None-Match: returns no error when all ETags do not match',
-done => {
+test('If-None-Match: returns no error when all ETags do not match', done => {
 requestGet({
 IfNoneMatch: 'non-matching,' +
 'non-matching-either',
@@ -451,18 +429,16 @@ describe('GET object', () => {
 });
 });

-it('If-None-Match: returns NotModified when ETag match, with ' +
-'double quotes around ETag',
-done => {
+test('If-None-Match: returns NotModified when ETag match, with ' +
+'double quotes around ETag', done => {
 requestGet({ IfNoneMatch: etag }, err => {
 checkError(err, 'NotModified');
 done();
 });
 });

-it('If-None-Match: returns NotModified when one of ETags match, ' +
-'with double quotes around ETag',
-done => {
+test('If-None-Match: returns NotModified when one of ETags match, ' +
+'with double quotes around ETag', done => {
 requestGet({
 IfNoneMatch: `non-matching,${etag}`,
 }, err => {
@@ -471,8 +447,7 @@ describe('GET object', () => {
 });
 });

-it('If-None-Match: returns NotModified when value is "*"',
-done => {
+test('If-None-Match: returns NotModified when value is "*"', done => {
 requestGet({
 IfNoneMatch: '*',
 }, err => {
@@ -481,18 +456,16 @@ describe('GET object', () => {
 });
 });

-it('If-None-Match: returns NotModified when ETag match, without ' +
-'double quotes around ETag',
-done => {
+test('If-None-Match: returns NotModified when ETag match, without ' +
+'double quotes around ETag', done => {
 requestGet({ IfNoneMatch: etagTrim }, err => {
 checkError(err, 'NotModified');
 done();
 });
 });

-it('If-None-Match: returns NotModified when one of ETags match, ' +
-'without double quotes around ETag',
-done => {
+test('If-None-Match: returns NotModified when one of ETags match, ' +
+'without double quotes around ETag', done => {
 requestGet({
 IfNoneMatch: `non-matching,${etagTrim}`,
 }, err => {
@@ -501,9 +474,8 @@ describe('GET object', () => {
 });
 });

-it('If-Modified-Since: returns no error if Last modified date is ' +
-'greater',
-done => {
+test('If-Modified-Since: returns no error if Last modified date is ' +
+'greater', done => {
 requestGet({ IfModifiedSince: dateFromNow(-1) },
 err => {
 checkNoError(err);
@@ -513,9 +485,8 @@ describe('GET object', () => {

 // Skipping this test, because real AWS does not provide error as
 // expected
-it.skip('If-Modified-Since: returns NotModified if Last modified ' +
-'date is lesser',
-done => {
+test.skip('If-Modified-Since: returns NotModified if Last modified ' +
+'date is lesser', done => {
 requestGet({ IfModifiedSince: dateFromNow(1) },
 err => {
 checkError(err, 'NotModified');
@@ -523,9 +494,8 @@ describe('GET object', () => {
 });
 });

-it('If-Modified-Since: returns NotModified if Last modified ' +
-'date is equal',
-done => {
+test('If-Modified-Since: returns NotModified if Last modified ' +
+'date is equal', done => {
 s3.headObject({ Bucket: bucketName, Key: objectName },
 (err, data) => {
 checkNoError(err);
@@ -537,9 +507,8 @@ describe('GET object', () => {
 });
 });

-it('If-Unmodified-Since: returns no error when lastModified date ' +
-'is greater',
-done => {
+test('If-Unmodified-Since: returns no error when lastModified date ' +
+'is greater', done => {
 requestGet({ IfUnmodifiedSince: dateFromNow(1) },
 err => {
 checkNoError(err);
@@ -547,7 +516,7 @@ describe('GET object', () => {
 });
 });

-it('If-Unmodified-Since: returns no error when lastModified ' +
|
test('If-Unmodified-Since: returns no error when lastModified ' +
|
||||||
'date is equal', done => {
|
'date is equal', done => {
|
||||||
s3.headObject({ Bucket: bucketName, Key: objectName },
|
s3.headObject({ Bucket: bucketName, Key: objectName },
|
||||||
(err, data) => {
|
(err, data) => {
|
||||||
|
@ -561,9 +530,8 @@ describe('GET object', () => {
|
||||||
});
|
});
|
||||||
});
|
});
|
||||||
|
|
||||||
it('If-Unmodified-Since: returns PreconditionFailed when ' +
|
test('If-Unmodified-Since: returns PreconditionFailed when ' +
|
||||||
'lastModified date is lesser',
|
'lastModified date is lesser', done => {
|
||||||
done => {
|
|
||||||
requestGet({ IfUnmodifiedSince: dateFromNow(-1) },
|
requestGet({ IfUnmodifiedSince: dateFromNow(-1) },
|
||||||
err => {
|
err => {
|
||||||
checkError(err, 'PreconditionFailed');
|
checkError(err, 'PreconditionFailed');
|
||||||
|
@ -571,9 +539,8 @@ describe('GET object', () => {
|
||||||
});
|
});
|
||||||
});
|
});
|
||||||
|
|
||||||
it('If-Match & If-Unmodified-Since: returns no error when match ' +
|
test('If-Match & If-Unmodified-Since: returns no error when match ' +
|
||||||
'Etag and lastModified is greater',
|
'Etag and lastModified is greater', done => {
|
||||||
done => {
|
|
||||||
requestGet({
|
requestGet({
|
||||||
IfMatch: etagTrim,
|
IfMatch: etagTrim,
|
||||||
IfUnmodifiedSince: dateFromNow(-1),
|
IfUnmodifiedSince: dateFromNow(-1),
|
||||||
|
@ -583,7 +550,7 @@ describe('GET object', () => {
|
||||||
});
|
});
|
||||||
});
|
});
|
||||||
|
|
||||||
it('If-Match match & If-Unmodified-Since match', done => {
|
test('If-Match match & If-Unmodified-Since match', done => {
|
||||||
requestGet({
|
requestGet({
|
||||||
IfMatch: etagTrim,
|
IfMatch: etagTrim,
|
||||||
IfUnmodifiedSince: dateFromNow(1),
|
IfUnmodifiedSince: dateFromNow(1),
|
||||||
|
@ -593,7 +560,7 @@ describe('GET object', () => {
|
||||||
});
|
});
|
||||||
});
|
});
|
||||||
|
|
||||||
it('If-Match not match & If-Unmodified-Since not match', done => {
|
test('If-Match not match & If-Unmodified-Since not match', done => {
|
||||||
requestGet({
|
requestGet({
|
||||||
IfMatch: 'non-matching',
|
IfMatch: 'non-matching',
|
||||||
IfUnmodifiedSince: dateFromNow(-1),
|
IfUnmodifiedSince: dateFromNow(-1),
|
||||||
|
@ -603,7 +570,7 @@ describe('GET object', () => {
|
||||||
});
|
});
|
||||||
});
|
});
|
||||||
|
|
||||||
it('If-Match not match & If-Unmodified-Since match', done => {
|
test('If-Match not match & If-Unmodified-Since match', done => {
|
||||||
requestGet({
|
requestGet({
|
||||||
IfMatch: 'non-matching',
|
IfMatch: 'non-matching',
|
||||||
IfUnmodifiedSince: dateFromNow(1),
|
IfUnmodifiedSince: dateFromNow(1),
|
||||||
|
@ -615,7 +582,7 @@ describe('GET object', () => {
|
||||||
|
|
||||||
// Skipping this test, because real AWS does not provide error as
|
// Skipping this test, because real AWS does not provide error as
|
||||||
// expected
|
// expected
|
||||||
it.skip('If-Match match & If-Modified-Since not match', done => {
|
test.skip('If-Match match & If-Modified-Since not match', done => {
|
||||||
requestGet({
|
requestGet({
|
||||||
IfMatch: etagTrim,
|
IfMatch: etagTrim,
|
||||||
IfModifiedSince: dateFromNow(1),
|
IfModifiedSince: dateFromNow(1),
|
||||||
|
@ -625,7 +592,7 @@ describe('GET object', () => {
|
||||||
});
|
});
|
||||||
});
|
});
|
||||||
|
|
||||||
it('If-Match match & If-Modified-Since match', done => {
|
test('If-Match match & If-Modified-Since match', done => {
|
||||||
requestGet({
|
requestGet({
|
||||||
IfMatch: etagTrim,
|
IfMatch: etagTrim,
|
||||||
IfModifiedSince: dateFromNow(-1),
|
IfModifiedSince: dateFromNow(-1),
|
||||||
|
@ -635,7 +602,7 @@ describe('GET object', () => {
|
||||||
});
|
});
|
||||||
});
|
});
|
||||||
|
|
||||||
it('If-Match not match & If-Modified-Since not match', done => {
|
test('If-Match not match & If-Modified-Since not match', done => {
|
||||||
requestGet({
|
requestGet({
|
||||||
IfMatch: 'non-matching',
|
IfMatch: 'non-matching',
|
||||||
IfModifiedSince: dateFromNow(1),
|
IfModifiedSince: dateFromNow(1),
|
||||||
|
@ -645,7 +612,7 @@ describe('GET object', () => {
|
||||||
});
|
});
|
||||||
});
|
});
|
||||||
|
|
||||||
it('If-Match not match & If-Modified-Since match', done => {
|
test('If-Match not match & If-Modified-Since match', done => {
|
||||||
requestGet({
|
requestGet({
|
||||||
IfMatch: 'non-matching',
|
IfMatch: 'non-matching',
|
||||||
IfModifiedSince: dateFromNow(-1),
|
IfModifiedSince: dateFromNow(-1),
|
||||||
|
@ -655,9 +622,8 @@ describe('GET object', () => {
|
||||||
});
|
});
|
||||||
});
|
});
|
||||||
|
|
||||||
it('If-None-Match & If-Modified-Since: returns NotModified when ' +
|
test('If-None-Match & If-Modified-Since: returns NotModified when ' +
|
||||||
'Etag does not match and lastModified is greater',
|
'Etag does not match and lastModified is greater', done => {
|
||||||
done => {
|
|
||||||
requestGet({
|
requestGet({
|
||||||
IfNoneMatch: etagTrim,
|
IfNoneMatch: etagTrim,
|
||||||
IfModifiedSince: dateFromNow(-1),
|
IfModifiedSince: dateFromNow(-1),
|
||||||
|
@ -667,8 +633,7 @@ describe('GET object', () => {
|
||||||
});
|
});
|
||||||
});
|
});
|
||||||
|
|
||||||
it('If-None-Match not match & If-Modified-Since not match',
|
test('If-None-Match not match & If-Modified-Since not match', done => {
|
||||||
done => {
|
|
||||||
requestGet({
|
requestGet({
|
||||||
IfNoneMatch: etagTrim,
|
IfNoneMatch: etagTrim,
|
||||||
IfModifiedSince: dateFromNow(1),
|
IfModifiedSince: dateFromNow(1),
|
||||||
|
@ -678,7 +643,7 @@ describe('GET object', () => {
|
||||||
});
|
});
|
||||||
});
|
});
|
||||||
|
|
||||||
it('If-None-Match match & If-Modified-Since match', done => {
|
test('If-None-Match match & If-Modified-Since match', done => {
|
||||||
requestGet({
|
requestGet({
|
||||||
IfNoneMatch: 'non-matching',
|
IfNoneMatch: 'non-matching',
|
||||||
IfModifiedSince: dateFromNow(-1),
|
IfModifiedSince: dateFromNow(-1),
|
||||||
|
@ -690,8 +655,7 @@ describe('GET object', () => {
|
||||||
|
|
||||||
// Skipping this test, because real AWS does not provide error as
|
// Skipping this test, because real AWS does not provide error as
|
||||||
// expected
|
// expected
|
||||||
it.skip('If-None-Match match & If-Modified-Since not match',
|
test.skip('If-None-Match match & If-Modified-Since not match', done => {
|
||||||
done => {
|
|
||||||
requestGet({
|
requestGet({
|
||||||
IfNoneMatch: 'non-matching',
|
IfNoneMatch: 'non-matching',
|
||||||
IfModifiedSince: dateFromNow(1),
|
IfModifiedSince: dateFromNow(1),
|
||||||
|
@ -701,7 +665,7 @@ describe('GET object', () => {
|
||||||
});
|
});
|
||||||
});
|
});
|
||||||
|
|
||||||
it('If-None-Match match & If-Unmodified-Since match', done => {
|
test('If-None-Match match & If-Unmodified-Since match', done => {
|
||||||
requestGet({
|
requestGet({
|
||||||
IfNoneMatch: 'non-matching',
|
IfNoneMatch: 'non-matching',
|
||||||
IfUnmodifiedSince: dateFromNow(1),
|
IfUnmodifiedSince: dateFromNow(1),
|
||||||
|
@ -711,7 +675,7 @@ describe('GET object', () => {
|
||||||
});
|
});
|
||||||
});
|
});
|
||||||
|
|
||||||
it('If-None-Match match & If-Unmodified-Since not match', done => {
|
test('If-None-Match match & If-Unmodified-Since not match', done => {
|
||||||
requestGet({
|
requestGet({
|
||||||
IfNoneMatch: 'non-matching',
|
IfNoneMatch: 'non-matching',
|
||||||
IfUnmodifiedSince: dateFromNow(-1),
|
IfUnmodifiedSince: dateFromNow(-1),
|
||||||
|
@ -721,7 +685,7 @@ describe('GET object', () => {
|
||||||
});
|
});
|
||||||
});
|
});
|
||||||
|
|
||||||
it('If-None-Match not match & If-Unmodified-Since match', done => {
|
test('If-None-Match not match & If-Unmodified-Since match', done => {
|
||||||
requestGet({
|
requestGet({
|
||||||
IfNoneMatch: etagTrim,
|
IfNoneMatch: etagTrim,
|
||||||
IfUnmodifiedSince: dateFromNow(1),
|
IfUnmodifiedSince: dateFromNow(1),
|
||||||
|
@ -731,8 +695,7 @@ describe('GET object', () => {
|
||||||
});
|
});
|
||||||
});
|
});
|
||||||
|
|
||||||
it('If-None-Match not match & If-Unmodified-Since not match',
|
test('If-None-Match not match & If-Unmodified-Since not match', done => {
|
||||||
done => {
|
|
||||||
requestGet({
|
requestGet({
|
||||||
IfNoneMatch: etagTrim,
|
IfNoneMatch: etagTrim,
|
||||||
IfUnmodifiedSince: dateFromNow(-1),
|
IfUnmodifiedSince: dateFromNow(-1),
|
||||||
|
@ -749,7 +712,8 @@ describe('GET object', () => {
|
||||||
const invalidPartNumbers = [-1, 0, 10001];
|
const invalidPartNumbers = [-1, 0, 10001];
|
||||||
|
|
||||||
orderedPartNumbers.forEach(num =>
|
orderedPartNumbers.forEach(num =>
|
||||||
it(`should get the body of part ${num} when ordered MPU`,
|
test(
|
||||||
|
`should get the body of part ${num} when ordered MPU`,
|
||||||
done => completeMPU(orderedPartNumbers, err => {
|
done => completeMPU(orderedPartNumbers, err => {
|
||||||
checkNoError(err);
|
checkNoError(err);
|
||||||
return requestGet({ PartNumber: num }, (err, data) => {
|
return requestGet({ PartNumber: num }, (err, data) => {
|
||||||
|
@ -758,17 +722,16 @@ describe('GET object', () => {
|
||||||
const md5Hash = crypto.createHash('md5');
|
const md5Hash = crypto.createHash('md5');
|
||||||
const md5HashExpected = crypto.createHash('md5');
|
const md5HashExpected = crypto.createHash('md5');
|
||||||
const expected = Buffer.alloc(partSize).fill(num);
|
const expected = Buffer.alloc(partSize).fill(num);
|
||||||
assert.strictEqual(
|
expect(md5Hash.update(data.Body).digest('hex')).toBe(md5HashExpected.update(expected).digest('hex'));
|
||||||
md5Hash.update(data.Body).digest('hex'),
|
|
||||||
md5HashExpected.update(expected).digest('hex')
|
|
||||||
);
|
|
||||||
return done();
|
return done();
|
||||||
});
|
});
|
||||||
})));
|
})
|
||||||
|
));
|
||||||
|
|
||||||
// Use the orderedPartNumbers to retrieve parts with GetObject.
|
// Use the orderedPartNumbers to retrieve parts with GetObject.
|
||||||
orderedPartNumbers.forEach(num =>
|
orderedPartNumbers.forEach(num =>
|
||||||
it(`should get the body of part ${num} when unordered MPU`,
|
test(
|
||||||
|
`should get the body of part ${num} when unordered MPU`,
|
||||||
done => completeMPU(unOrderedPartNumbers, err => {
|
done => completeMPU(unOrderedPartNumbers, err => {
|
||||||
checkNoError(err);
|
checkNoError(err);
|
||||||
return requestGet({ PartNumber: num }, (err, data) => {
|
return requestGet({ PartNumber: num }, (err, data) => {
|
||||||
|
@ -778,25 +741,25 @@ describe('GET object', () => {
|
||||||
const md5HashExpected = crypto.createHash('md5');
|
const md5HashExpected = crypto.createHash('md5');
|
||||||
const expected = Buffer.alloc(partSize)
|
const expected = Buffer.alloc(partSize)
|
||||||
.fill(unOrderedPartNumbers[num - 1]);
|
.fill(unOrderedPartNumbers[num - 1]);
|
||||||
assert.strictEqual(
|
expect(md5Hash.update(data.Body).digest('hex')).toBe(md5HashExpected.update(expected).digest('hex'));
|
||||||
md5Hash.update(data.Body).digest('hex'),
|
|
||||||
md5HashExpected.update(expected).digest('hex')
|
|
||||||
);
|
|
||||||
return done();
|
return done();
|
||||||
});
|
});
|
||||||
})));
|
})
|
||||||
|
));
|
||||||
|
|
||||||
invalidPartNumbers.forEach(num =>
|
invalidPartNumbers.forEach(num =>
|
||||||
it(`should not accept a partNumber that is not 1-10000: ${num}`,
|
test(
|
||||||
|
`should not accept a partNumber that is not 1-10000: ${num}`,
|
||||||
done => completeMPU(orderedPartNumbers, err => {
|
done => completeMPU(orderedPartNumbers, err => {
|
||||||
checkNoError(err);
|
checkNoError(err);
|
||||||
return requestGet({ PartNumber: num }, err => {
|
return requestGet({ PartNumber: num }, err => {
|
||||||
checkError(err, 'InvalidArgument');
|
checkError(err, 'InvalidArgument');
|
||||||
done();
|
done();
|
||||||
});
|
});
|
||||||
})));
|
})
|
||||||
|
));
|
||||||
|
|
||||||
it('should not accept a part number greater than the total parts ' +
|
test('should not accept a part number greater than the total parts ' +
|
||||||
'uploaded for an MPU', done =>
|
'uploaded for an MPU', done =>
|
||||||
completeMPU(orderedPartNumbers, err => {
|
completeMPU(orderedPartNumbers, err => {
|
||||||
checkNoError(err);
|
checkNoError(err);
|
||||||
|
@ -806,7 +769,8 @@ describe('GET object', () => {
|
||||||
});
|
});
|
||||||
}));
|
}));
|
||||||
|
|
||||||
it('should accept a part number of 1 for regular put object',
|
test(
|
||||||
|
'should accept a part number of 1 for regular put object',
|
||||||
done => s3.putObject({
|
done => s3.putObject({
|
||||||
Bucket: bucketName,
|
Bucket: bucketName,
|
||||||
Key: objectName,
|
Key: objectName,
|
||||||
|
@ -817,15 +781,13 @@ describe('GET object', () => {
|
||||||
const md5Hash = crypto.createHash('md5');
|
const md5Hash = crypto.createHash('md5');
|
||||||
const md5HashExpected = crypto.createHash('md5');
|
const md5HashExpected = crypto.createHash('md5');
|
||||||
const expected = Buffer.alloc(10);
|
const expected = Buffer.alloc(10);
|
||||||
assert.strictEqual(
|
expect(md5Hash.update(data.Body).digest('hex')).toBe(md5HashExpected.update(expected).digest('hex'));
|
||||||
md5Hash.update(data.Body).digest('hex'),
|
|
||||||
md5HashExpected.update(expected).digest('hex')
|
|
||||||
);
|
|
||||||
done();
|
done();
|
||||||
});
|
});
|
||||||
}));
|
})
|
||||||
|
);
|
||||||
|
|
||||||
it('should accept a part number that is a string', done =>
|
test('should accept a part number that is a string', done =>
|
||||||
s3.putObject({
|
s3.putObject({
|
||||||
Bucket: bucketName,
|
Bucket: bucketName,
|
||||||
Key: objectName,
|
Key: objectName,
|
||||||
|
@ -837,15 +799,12 @@ describe('GET object', () => {
|
||||||
const md5Hash = crypto.createHash('md5');
|
const md5Hash = crypto.createHash('md5');
|
||||||
const md5HashExpected = crypto.createHash('md5');
|
const md5HashExpected = crypto.createHash('md5');
|
||||||
const expected = Buffer.alloc(10);
|
const expected = Buffer.alloc(10);
|
||||||
assert.strictEqual(
|
expect(md5Hash.update(data.Body).digest('hex')).toBe(md5HashExpected.update(expected).digest('hex'));
|
||||||
md5Hash.update(data.Body).digest('hex'),
|
|
||||||
md5HashExpected.update(expected).digest('hex')
|
|
||||||
);
|
|
||||||
done();
|
done();
|
||||||
});
|
});
|
||||||
}));
|
}));
|
||||||
|
|
||||||
it('should not accept a part number greater than 1 for regular ' +
|
test('should not accept a part number greater than 1 for regular ' +
|
||||||
'put object', done =>
|
'put object', done =>
|
||||||
s3.putObject({
|
s3.putObject({
|
||||||
Bucket: bucketName,
|
Bucket: bucketName,
|
||||||
|
@ -859,7 +818,7 @@ describe('GET object', () => {
|
||||||
});
|
});
|
||||||
}));
|
}));
|
||||||
|
|
||||||
it('should not accept both PartNumber and Range as params', done =>
|
test('should not accept both PartNumber and Range as params', done =>
|
||||||
completeMPU(orderedPartNumbers, err => {
|
completeMPU(orderedPartNumbers, err => {
|
||||||
checkNoError(err);
|
checkNoError(err);
|
||||||
return requestGet({
|
return requestGet({
|
||||||
|
@ -907,13 +866,15 @@ describe('GET object', () => {
|
||||||
Key: copyPartKey,
|
Key: copyPartKey,
|
||||||
}, done));
|
}, done));
|
||||||
|
|
||||||
it('should retrieve a part copied from an MPU', done =>
|
test('should retrieve a part copied from an MPU', done =>
|
||||||
checkGetObjectPart(copyPartKey, 1, partOneSize, partOneBody,
|
checkGetObjectPart(copyPartKey, 1, partOneSize, partOneBody,
|
||||||
done));
|
done));
|
||||||
|
|
||||||
it('should retrieve a part put after part copied from MPU',
|
test(
|
||||||
|
'should retrieve a part put after part copied from MPU',
|
||||||
done => checkGetObjectPart(copyPartKey, 2, partSize,
|
done => checkGetObjectPart(copyPartKey, 2, partSize,
|
||||||
partTwoBody, done));
|
partTwoBody, done)
|
||||||
|
);
|
||||||
});
|
});
|
||||||
|
|
||||||
describe('uploadPartCopy overwrite', () => {
|
describe('uploadPartCopy overwrite', () => {
|
||||||
|
@ -980,35 +941,33 @@ describe('GET object', () => {
|
||||||
Key: copyPartKey,
|
Key: copyPartKey,
|
||||||
}, done));
|
}, done));
|
||||||
|
|
||||||
it('should retrieve a part that overwrote another part ' +
|
test('should retrieve a part that overwrote another part ' +
|
||||||
'originally copied from an MPU', done =>
|
'originally copied from an MPU', done =>
|
||||||
checkGetObjectPart(copyPartKey, 1, partSize, partOneBody,
|
checkGetObjectPart(copyPartKey, 1, partSize, partOneBody,
|
||||||
done));
|
done));
|
||||||
|
|
||||||
it('should retrieve a part copied from an MPU after the ' +
|
test('should retrieve a part copied from an MPU after the ' +
|
||||||
'original part was overwritten',
|
'original part was overwritten', done => checkGetObjectPart(copyPartKey, 2, partTwoSize,
|
||||||
done => checkGetObjectPart(copyPartKey, 2, partTwoSize,
|
|
||||||
partTwoBody, done));
|
partTwoBody, done));
|
||||||
});
|
});
|
||||||
});
|
});
|
||||||
|
|
||||||
describe('absent x-amz-website-redirect-location header', () => {
|
describe('absent x-amz-website-redirect-location header', () => {
|
||||||
before(done => {
|
beforeAll(done => {
|
||||||
const params = {
|
const params = {
|
||||||
Bucket: bucketName,
|
Bucket: bucketName,
|
||||||
Key: objectName,
|
Key: objectName,
|
||||||
};
|
};
|
||||||
s3.putObject(params, err => done(err));
|
s3.putObject(params, err => done(err));
|
||||||
});
|
});
|
||||||
it('should return website redirect header if specified in ' +
|
test('should return website redirect header if specified in ' +
|
||||||
'objectPUT request', done => {
|
'objectPUT request', done => {
|
||||||
s3.getObject({ Bucket: bucketName, Key: objectName },
|
s3.getObject({ Bucket: bucketName, Key: objectName },
|
||||||
(err, res) => {
|
(err, res) => {
|
||||||
if (err) {
|
if (err) {
|
||||||
return done(err);
|
return done(err);
|
||||||
}
|
}
|
||||||
assert.strictEqual(res.WebsiteRedirectLocation,
|
expect(res.WebsiteRedirectLocation).toBe(undefined);
|
||||||
undefined);
|
|
||||||
return done();
|
return done();
|
||||||
});
|
});
|
||||||
});
|
});
|
||||||
|
|
|
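Note on the hunks above: the migration makes exactly two kinds of change. Mocha's `it`/`it.skip` become Jest's `test`/`test.skip` with the `done` parameter folded onto the title line, and multi-line `assert.strictEqual(a, b)` calls collapse into single-line `expect(a).toBe(b)` matchers. A minimal before/after sketch of the checksum comparison (the buffers here are stand-ins, not this suite's actual part bodies):

    const crypto = require('crypto');

    // before (Mocha + assert):
    // assert.strictEqual(
    //     md5Hash.update(data.Body).digest('hex'),
    //     md5HashExpected.update(expected).digest('hex')
    // );

    // after (Jest): one matcher, same strict equality on the hex digests
    test('md5 of the downloaded part matches the uploaded body', done => {
        const body = Buffer.alloc(10).fill(1);      // stand-in for data.Body
        const expected = Buffer.alloc(10).fill(1);  // stand-in for the uploaded part
        expect(crypto.createHash('md5').update(body).digest('hex'))
            .toBe(crypto.createHash('md5').update(expected).digest('hex'));
        done();
    });

`toBe` compares with `Object.is`, so for two hex strings it behaves exactly like `assert.strictEqual`.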
@@ -19,7 +19,7 @@ describe('GET multipart upload object [Cache-Control, Content-Disposition, ' +
     // a UNIX timestamp for Expires header
     const expires = new Date();
 
-    before(() => {
+    beforeAll(() => {
         const params = {
             Bucket: bucketName,
             Key: objectName,
@@ -58,7 +58,7 @@ describe('GET multipart upload object [Cache-Control, Content-Disposition, ' +
             throw err;
         });
     });
-    after(() => {
+    afterAll(() => {
         process.stdout.write('Emptying bucket\n');
         return bucketUtil.empty(bucketName)
         .then(() => {
@@ -70,9 +70,8 @@ describe('GET multipart upload object [Cache-Control, Content-Disposition, ' +
             throw err;
         });
     });
-    it('should return additional headers when get request is performed ' +
-    'on MPU, when they are specified in creation of MPU',
-        () => {
+    test('should return additional headers when get request is performed ' +
+    'on MPU, when they are specified in creation of MPU', () => {
         const params = { Bucket: bucketName, Key: 'key', PartNumber: 1,
             UploadId: uploadId };
         return s3.uploadPartAsync(params)
@@ -108,10 +107,10 @@ describe('GET multipart upload object [Cache-Control, Content-Disposition, ' +
             throw err;
         })
         .then(res => {
-            assert.strictEqual(res.CacheControl, cacheControl);
-            assert.strictEqual(res.ContentDisposition, contentDisposition);
-            assert.strictEqual(res.ContentEncoding, 'gzip');
-            assert.strictEqual(res.Expires, expires.toGMTString());
+            expect(res.CacheControl).toBe(cacheControl);
+            expect(res.ContentDisposition).toBe(contentDisposition);
+            expect(res.ContentEncoding).toBe('gzip');
+            expect(res.Expires).toBe(expires.toGMTString());
         });
     });
 });
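The only structural change in this file is the hook rename: Mocha's suite-level `before`/`after` map one-to-one onto Jest's `beforeAll`/`afterAll` (per-test `beforeEach`/`afterEach` keep their names). A minimal sketch with an illustrative fixture, not this file's real bucket setup:

    describe('suite with a shared fixture', () => {
        let fixture;

        beforeAll(() => {          // was: before(() => { ... })
            fixture = { ready: true };  // runs once, before the first test
        });

        afterAll(() => {           // was: after(() => { ... })
            fixture = null;             // runs once, after the last test
        });

        test('sees the fixture', () => {
            expect(fixture.ready).toBe(true);
        });
    });

Both hook styles accept a `done` callback or a returned promise, so the hook bodies carry over unchanged.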
|
@ -20,9 +20,9 @@ const taggingConfig = { TagSet: [
|
||||||
] };
|
] };
|
||||||
|
|
||||||
function _checkError(err, code, statusCode) {
|
function _checkError(err, code, statusCode) {
|
||||||
assert(err, 'Expected error but found none');
|
expect(err).toBeTruthy();
|
||||||
assert.strictEqual(err.code, code);
|
expect(err.code).toBe(code);
|
||||||
assert.strictEqual(err.statusCode, statusCode);
|
expect(err.statusCode).toBe(statusCode);
|
||||||
}
|
}
|
||||||
|
|
||||||
describe('GET object taggings', () => {
|
describe('GET object taggings', () => {
|
||||||
|
@ -54,7 +54,7 @@ describe('GET object taggings', () => {
|
||||||
});
|
});
|
||||||
});
|
});
|
||||||
|
|
||||||
it('should return appropriate tags after putting tags', done => {
|
test('should return appropriate tags after putting tags', done => {
|
||||||
s3.putObjectTagging({
|
s3.putObjectTagging({
|
||||||
Bucket: bucketName,
|
Bucket: bucketName,
|
||||||
Key: objectName,
|
Key: objectName,
|
||||||
|
@ -70,7 +70,7 @@ describe('GET object taggings', () => {
|
||||||
});
|
});
|
||||||
});
|
});
|
||||||
|
|
||||||
it('should return no tag after putting and deleting tags', done => {
|
test('should return no tag after putting and deleting tags', done => {
|
||||||
async.waterfall([
|
async.waterfall([
|
||||||
next => s3.putObjectTagging({
|
next => s3.putObjectTagging({
|
||||||
Bucket: bucketName,
|
Bucket: bucketName,
|
||||||
|
@ -88,7 +88,7 @@ describe('GET object taggings', () => {
|
||||||
});
|
});
|
||||||
});
|
});
|
||||||
|
|
||||||
it('should return empty array after putting no tag', done => {
|
test('should return empty array after putting no tag', done => {
|
||||||
s3.getObjectTagging({ Bucket: bucketName, Key: objectName },
|
s3.getObjectTagging({ Bucket: bucketName, Key: objectName },
|
||||||
(err, data) => {
|
(err, data) => {
|
||||||
assert.ifError(err, `getObjectTagging error: ${err}`);
|
assert.ifError(err, `getObjectTagging error: ${err}`);
|
||||||
|
@ -97,8 +97,7 @@ describe('GET object taggings', () => {
|
||||||
});
|
});
|
||||||
});
|
});
|
||||||
|
|
||||||
it('should return NoSuchKey getting tag to a non-existing object',
|
test('should return NoSuchKey getting tag to a non-existing object', done => {
|
||||||
done => {
|
|
||||||
s3.getObjectTagging({
|
s3.getObjectTagging({
|
||||||
Bucket: bucketName,
|
Bucket: bucketName,
|
||||||
Key: 'nonexisting',
|
Key: 'nonexisting',
|
||||||
|
@ -108,18 +107,19 @@ describe('GET object taggings', () => {
|
||||||
});
|
});
|
||||||
});
|
});
|
||||||
|
|
||||||
it('should return 403 AccessDenied getting tag with another account',
|
test(
|
||||||
|
'should return 403 AccessDenied getting tag with another account',
|
||||||
done => {
|
done => {
|
||||||
otherAccountS3.getObjectTagging({ Bucket: bucketName, Key:
|
otherAccountS3.getObjectTagging({ Bucket: bucketName, Key:
|
||||||
objectName }, err => {
|
objectName }, err => {
|
||||||
_checkError(err, 'AccessDenied', 403);
|
_checkError(err, 'AccessDenied', 403);
|
||||||
done();
|
done();
|
||||||
});
|
});
|
||||||
});
|
}
|
||||||
|
);
|
||||||
|
|
||||||
it('should return 403 AccessDenied getting tag with a different ' +
|
test('should return 403 AccessDenied getting tag with a different ' +
|
||||||
'account to an object with ACL "public-read-write"',
|
'account to an object with ACL "public-read-write"', done => {
|
||||||
done => {
|
|
||||||
s3.putObjectAcl({ Bucket: bucketName, Key: objectName,
|
s3.putObjectAcl({ Bucket: bucketName, Key: objectName,
|
||||||
ACL: 'public-read-write' }, err => {
|
ACL: 'public-read-write' }, err => {
|
||||||
if (err) {
|
if (err) {
|
||||||
|
@ -133,9 +133,8 @@ describe('GET object taggings', () => {
|
||||||
});
|
});
|
||||||
});
|
});
|
||||||
|
|
||||||
it('should return 403 AccessDenied getting tag to an object ' +
|
test('should return 403 AccessDenied getting tag to an object ' +
|
||||||
'in a bucket created with a different account',
|
'in a bucket created with a different account', done => {
|
||||||
done => {
|
|
||||||
async.waterfall([
|
async.waterfall([
|
||||||
next => s3.putBucketAcl({ Bucket: bucketName, ACL:
|
next => s3.putBucketAcl({ Bucket: bucketName, ACL:
|
||||||
'public-read-write' }, err => next(err)),
|
'public-read-write' }, err => next(err)),
|
||||||
|
@ -149,7 +148,7 @@ describe('GET object taggings', () => {
|
||||||
});
|
});
|
||||||
});
|
});
|
||||||
|
|
||||||
it('should get tag to an object in a bucket created with same ' +
|
test('should get tag to an object in a bucket created with same ' +
|
||||||
'account', done => {
|
'account', done => {
|
||||||
async.waterfall([
|
async.waterfall([
|
||||||
next => s3.putBucketAcl({ Bucket: bucketName, ACL:
|
next => s3.putBucketAcl({ Bucket: bucketName, ACL:
|
||||||
|
|
|
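The `_checkError` helper keeps its signature and call sites (e.g. `_checkError(err, 'AccessDenied', 403)`); only its body changes. One small loss: `expect(err).toBeTruthy()` drops the 'Expected error but found none' message the old `assert` carried. If that message is worth keeping, a hedged alternative in plain Jest, no extra libraries:

    function _checkError(err, code, statusCode) {
        if (!err) {
            // preserves the old assert's custom failure message
            throw new Error('Expected error but found none');
        }
        expect(err.code).toBe(code);
        expect(err.statusCode).toBe(statusCode);
    }

Jest prints the received value on a failed matcher, so the terser `expect(err).toBeTruthy()` version in the diff is usually informative enough.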
@@ -14,8 +14,8 @@ const endRangeTest = (inputRange, expectedRange, cb) => {
     };
 
     s3.getObject(params, (err, data) => {
-        assert.strictEqual(data.ContentLength, '90');
-        assert.strictEqual(data.ContentRange, expectedRange);
+        expect(data.ContentLength).toBe('90');
+        expect(data.ContentRange).toBe(expectedRange);
         assert.deepStrictEqual(data.Body, Buffer.allocUnsafe(90).fill(1));
         cb();
     });
@@ -58,15 +58,11 @@ describe('aws-node-sdk range test of large end position', () => {
         });
     });
 
-    it('should get the final 90 bytes of a 2890 byte object for a byte ' +
-    'range of 2800-',
-        done => endRangeTest('bytes=2800-', 'bytes 2800-2889/2890', done)
-    );
+    test('should get the final 90 bytes of a 2890 byte object for a byte ' +
+    'range of 2800-', done => endRangeTest('bytes=2800-', 'bytes 2800-2889/2890', done));
 
-    it('should get the final 90 bytes of a 2890 byte object for a byte ' +
-    'range of 2800-Number.MAX_SAFE_INTEGER',
-        done => endRangeTest(`bytes=2800-${Number.MAX_SAFE_INTEGER}`,
-            'bytes 2800-2889/2890', done)
-    );
+    test('should get the final 90 bytes of a 2890 byte object for a byte ' +
+    'range of 2800-Number.MAX_SAFE_INTEGER', done => endRangeTest(`bytes=2800-${Number.MAX_SAFE_INTEGER}`,
+        'bytes 2800-2889/2890', done));
 });
 });
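For reference, the first hunk shows most of the `endRangeTest` helper these specs call: it issues a ranged `getObject` and asserts on `ContentLength`, `ContentRange`, and the body. Its inferred shape follows; the opening lines fall outside the hunk, so `bucket`, `key`, and the exact `params` construction are assumptions:

    const endRangeTest = (inputRange, expectedRange, cb) => {
        const params = { Bucket: bucket, Key: key, Range: inputRange }; // assumed
        s3.getObject(params, (err, data) => {
            expect(data.ContentLength).toBe('90');
            expect(data.ContentRange).toBe(expectedRange);
            assert.deepStrictEqual(data.Body, Buffer.allocUnsafe(90).fill(1));
            cb();
        });
    };

Note that `assert.deepStrictEqual` survives the migration here; `expect(a).toEqual(b)` would be the Jest matcher equivalent for the Buffer comparison.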
|
@ -26,25 +26,26 @@ describe('Initiate MPU', () => {
|
||||||
|
|
||||||
afterEach(() => bucketUtil.deleteOne(bucket));
|
afterEach(() => bucketUtil.deleteOne(bucket));
|
||||||
|
|
||||||
it('should return InvalidRedirectLocation if initiate MPU ' +
|
test('should return InvalidRedirectLocation if initiate MPU ' +
|
||||||
'with x-amz-website-redirect-location header that does not start ' +
|
'with x-amz-website-redirect-location header that does not start ' +
|
||||||
'with \'http://\', \'https://\' or \'/\'', done => {
|
'with \'http://\', \'https://\' or \'/\'', done => {
|
||||||
const params = { Bucket: bucket, Key: key,
|
const params = { Bucket: bucket, Key: key,
|
||||||
WebsiteRedirectLocation: 'google.com' };
|
WebsiteRedirectLocation: 'google.com' };
|
||||||
s3.createMultipartUpload(params, err => {
|
s3.createMultipartUpload(params, err => {
|
||||||
assert.strictEqual(err.code, 'InvalidRedirectLocation');
|
expect(err.code).toBe('InvalidRedirectLocation');
|
||||||
assert.strictEqual(err.statusCode, 400);
|
expect(err.statusCode).toBe(400);
|
||||||
done();
|
done();
|
||||||
});
|
});
|
||||||
});
|
});
|
||||||
|
|
||||||
it('should return error if initiating MPU w/ > 2KB user-defined md',
|
test(
|
||||||
|
'should return error if initiating MPU w/ > 2KB user-defined md',
|
||||||
done => {
|
done => {
|
||||||
const metadata = genMaxSizeMetaHeaders();
|
const metadata = genMaxSizeMetaHeaders();
|
||||||
const params = { Bucket: bucket, Key: key, Metadata: metadata };
|
const params = { Bucket: bucket, Key: key, Metadata: metadata };
|
||||||
async.waterfall([
|
async.waterfall([
|
||||||
next => s3.createMultipartUpload(params, (err, data) => {
|
next => s3.createMultipartUpload(params, (err, data) => {
|
||||||
assert.strictEqual(err, null, `Unexpected err: ${err}`);
|
expect(err).toBe(null);
|
||||||
next(null, data.UploadId);
|
next(null, data.UploadId);
|
||||||
}),
|
}),
|
||||||
(uploadId, next) => s3.abortMultipartUpload({
|
(uploadId, next) => s3.abortMultipartUpload({
|
||||||
|
@ -52,18 +53,19 @@ describe('Initiate MPU', () => {
|
||||||
Key: key,
|
Key: key,
|
||||||
UploadId: uploadId,
|
UploadId: uploadId,
|
||||||
}, err => {
|
}, err => {
|
||||||
assert.strictEqual(err, null, `Unexpected err: ${err}`);
|
expect(err).toBe(null);
|
||||||
// add one more byte to push over limit for next call
|
// add one more byte to push over limit for next call
|
||||||
metadata.header0 = `${metadata.header0}${'0'}`;
|
metadata.header0 = `${metadata.header0}${'0'}`;
|
||||||
next();
|
next();
|
||||||
}),
|
}),
|
||||||
next => s3.createMultipartUpload(params, next),
|
next => s3.createMultipartUpload(params, next),
|
||||||
], err => {
|
], err => {
|
||||||
assert(err, 'Expected err but did not find one');
|
expect(err).toBeTruthy();
|
||||||
assert.strictEqual(err.code, 'MetadataTooLarge');
|
expect(err.code).toBe('MetadataTooLarge');
|
||||||
assert.strictEqual(err.statusCode, 400);
|
expect(err.statusCode).toBe(400);
|
||||||
done();
|
done();
|
||||||
});
|
});
|
||||||
});
|
}
|
||||||
|
);
|
||||||
});
|
});
|
||||||
});
|
});
|
||||||
|
|
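Worth flagging: `expect(err).toBe(null)` drops the interpolated message from `assert.strictEqual(err, null, \`Unexpected err: ${err}\`)`. Jest prints the received value when `toBe` fails, so the error still surfaces, just without the custom prefix. If the prefix matters, a sketch of a drop-in that keeps it (`expectNoError` is a hypothetical name, not something this diff introduces):

    function expectNoError(err) {
        if (err) {
            throw new Error(`Unexpected err: ${err}`);
        }
    }

    // usage inside the waterfall steps above:
    // next => s3.createMultipartUpload(params, (err, data) => {
    //     expectNoError(err);
    //     next(null, data.UploadId);
    // }),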
|
@ -9,8 +9,7 @@ const bodyFirstPart = Buffer.allocUnsafe(10).fill(0);
|
||||||
const bodySecondPart = Buffer.allocUnsafe(20).fill(0);
|
const bodySecondPart = Buffer.allocUnsafe(20).fill(0);
|
||||||
|
|
||||||
function checkNoError(err) {
|
function checkNoError(err) {
|
||||||
assert.equal(err, null,
|
expect(err).toEqual(null);
|
||||||
`Expected success, got error ${JSON.stringify(err)}`);
|
|
||||||
}
|
}
|
||||||
|
|
||||||
describe('List parts', () => {
|
describe('List parts', () => {
|
||||||
|
@ -58,7 +57,7 @@ describe('List parts', () => {
|
||||||
});
|
});
|
||||||
});
|
});
|
||||||
|
|
||||||
it('should only list the second part', done => {
|
test('should only list the second part', done => {
|
||||||
s3.listParts({
|
s3.listParts({
|
||||||
Bucket: bucket,
|
Bucket: bucket,
|
||||||
Key: key,
|
Key: key,
|
||||||
|
@ -66,9 +65,9 @@ describe('List parts', () => {
|
||||||
UploadId: uploadId },
|
UploadId: uploadId },
|
||||||
(err, data) => {
|
(err, data) => {
|
||||||
checkNoError(err);
|
checkNoError(err);
|
||||||
assert.strictEqual(data.Parts[0].PartNumber, 2);
|
expect(data.Parts[0].PartNumber).toBe(2);
|
||||||
assert.strictEqual(data.Parts[0].Size, 20);
|
expect(data.Parts[0].Size).toBe(20);
|
||||||
assert.strictEqual(`${data.Parts[0].ETag}`, secondEtag);
|
expect(`${data.Parts[0].ETag}`).toBe(secondEtag);
|
||||||
done();
|
done();
|
||||||
});
|
});
|
||||||
});
|
});
|
||||||
|
@ -111,7 +110,7 @@ function test(s3, bucket, key, uploadId, cb) {
|
||||||
UploadId: uploadId },
|
UploadId: uploadId },
|
||||||
(err, data) => {
|
(err, data) => {
|
||||||
checkNoError(err);
|
checkNoError(err);
|
||||||
assert.strictEqual(data.Key, key);
|
expect(data.Key).toBe(key);
|
||||||
cb();
|
cb();
|
||||||
});
|
});
|
||||||
}
|
}
|
||||||
|
@ -128,13 +127,14 @@ describe('List parts - object keys with special characters: `&`', () => {
|
||||||
.then(res => {
|
.then(res => {
|
||||||
uploadId = res;
|
uploadId = res;
|
||||||
return Promise.resolve();
|
return Promise.resolve();
|
||||||
})
|
}));
|
||||||
);
|
|
||||||
|
|
||||||
afterEach(() => deletePart(s3, bucketUtil, key, uploadId));
|
afterEach(() => deletePart(s3, bucketUtil, key, uploadId));
|
||||||
|
|
||||||
it('should list parts of an object with `&` in its key',
|
test(
|
||||||
done => test(s3, bucket, key, uploadId, done));
|
'should list parts of an object with `&` in its key',
|
||||||
|
done => test(s3, bucket, key, uploadId, done)
|
||||||
|
);
|
||||||
});
|
});
|
||||||
});
|
});
|
||||||
|
|
||||||
|
@ -150,13 +150,14 @@ describe('List parts - object keys with special characters: `"`', () => {
|
||||||
.then(res => {
|
.then(res => {
|
||||||
uploadId = res;
|
uploadId = res;
|
||||||
return Promise.resolve();
|
return Promise.resolve();
|
||||||
})
|
}));
|
||||||
);
|
|
||||||
|
|
||||||
afterEach(() => deletePart(s3, bucketUtil, key, uploadId));
|
afterEach(() => deletePart(s3, bucketUtil, key, uploadId));
|
||||||
|
|
||||||
it('should list parts of an object with `"` in its key',
|
test(
|
||||||
done => test(s3, bucket, key, uploadId, done));
|
'should list parts of an object with `"` in its key',
|
||||||
|
done => test(s3, bucket, key, uploadId, done)
|
||||||
|
);
|
||||||
});
|
});
|
||||||
});
|
});
|
||||||
|
|
||||||
|
@ -172,13 +173,14 @@ describe('List parts - object keys with special characters: `\'`', () => {
|
||||||
.then(res => {
|
.then(res => {
|
||||||
uploadId = res;
|
uploadId = res;
|
||||||
return Promise.resolve();
|
return Promise.resolve();
|
||||||
})
|
}));
|
||||||
);
|
|
||||||
|
|
||||||
afterEach(() => deletePart(s3, bucketUtil, key, uploadId));
|
afterEach(() => deletePart(s3, bucketUtil, key, uploadId));
|
||||||
|
|
||||||
it('should list parts of an object with `\'` in its key',
|
test(
|
||||||
done => test(s3, bucket, key, uploadId, done));
|
'should list parts of an object with `\'` in its key',
|
||||||
|
done => test(s3, bucket, key, uploadId, done)
|
||||||
|
);
|
||||||
});
|
});
|
||||||
});
|
});
|
||||||
|
|
||||||
|
@ -194,13 +196,14 @@ describe('List parts - object keys with special characters: `<`', () => {
|
||||||
.then(res => {
|
.then(res => {
|
||||||
uploadId = res;
|
uploadId = res;
|
||||||
return Promise.resolve();
|
return Promise.resolve();
|
||||||
})
|
}));
|
||||||
);
|
|
||||||
|
|
||||||
afterEach(() => deletePart(s3, bucketUtil, key, uploadId));
|
afterEach(() => deletePart(s3, bucketUtil, key, uploadId));
|
||||||
|
|
||||||
it('should list parts of an object with `<` in its key',
|
test(
|
||||||
done => test(s3, bucket, key, uploadId, done));
|
'should list parts of an object with `<` in its key',
|
||||||
|
done => test(s3, bucket, key, uploadId, done)
|
||||||
|
);
|
||||||
});
|
});
|
||||||
});
|
});
|
||||||
|
|
||||||
|
@ -216,12 +219,13 @@ describe('List parts - object keys with special characters: `>`', () => {
|
||||||
.then(res => {
|
.then(res => {
|
||||||
uploadId = res;
|
uploadId = res;
|
||||||
return Promise.resolve();
|
return Promise.resolve();
|
||||||
})
|
}));
|
||||||
);
|
|
||||||
|
|
||||||
afterEach(() => deletePart(s3, bucketUtil, key, uploadId));
|
afterEach(() => deletePart(s3, bucketUtil, key, uploadId));
|
||||||
|
|
||||||
it('should list parts of an object with `>` in its key',
|
test(
|
||||||
done => test(s3, bucket, key, uploadId, done));
|
'should list parts of an object with `>` in its key',
|
||||||
|
done => test(s3, bucket, key, uploadId, done)
|
||||||
|
);
|
||||||
});
|
});
|
||||||
});
|
});
|
||||||
|
|
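A hazard specific to this file: it declares a module-level helper `function test(s3, bucket, key, uploadId, cb)` (visible in the `@@ -111,7 +110,7 @@` hunk), and the migrated specs now also call `test('should list parts ...', ...)` expecting Jest's global. In a CommonJS module, the local function declaration shadows the injected global for the whole file, so those registrations would hit the helper instead of Jest. A rename sidesteps this; a condensed sketch assuming nothing else imports the helper by name:

    // rename the local helper so Jest's global `test` stays reachable
    function listPartsAndCheckKey(s3, bucket, key, uploadId, cb) {
        s3.listParts({ Bucket: bucket, Key: key, UploadId: uploadId },
            (err, data) => {
                checkNoError(err);
                expect(data.Key).toBe(key);
                cb();
            });
    }

    test('should list parts of an object with `&` in its key',
        done => listPartsAndCheckKey(s3, bucket, key, uploadId, done));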
|
@ -108,15 +108,13 @@ describe('aws-node-sdk test suite of listMultipartUploads', () =>
|
||||||
UploadId: data.uploadId,
|
UploadId: data.uploadId,
|
||||||
})
|
})
|
||||||
.then(() => bucketUtil.empty(bucket))
|
.then(() => bucketUtil.empty(bucket))
|
||||||
.then(() => bucketUtil.deleteOne(bucket))
|
.then(() => bucketUtil.deleteOne(bucket)));
|
||||||
);
|
|
||||||
|
|
||||||
it('should list ongoing multipart uploads', () =>
|
test('should list ongoing multipart uploads', () =>
|
||||||
s3.listMultipartUploadsAsync({ Bucket: bucket })
|
s3.listMultipartUploadsAsync({ Bucket: bucket })
|
||||||
.then(res => checkValues(res, data))
|
.then(res => checkValues(res, data)));
|
||||||
);
|
|
||||||
|
|
||||||
it('should list ongoing multipart uploads with params', () => {
|
test('should list ongoing multipart uploads with params', () => {
|
||||||
data.prefixVal = 'to';
|
data.prefixVal = 'to';
|
||||||
data.delimiter = 'test-delimiter';
|
data.delimiter = 'test-delimiter';
|
||||||
data.maxUploads = 1;
|
data.maxUploads = 1;
|
||||||
|
@ -130,7 +128,7 @@ describe('aws-node-sdk test suite of listMultipartUploads', () =>
|
||||||
.then(res => checkValues(res, data));
|
.then(res => checkValues(res, data));
|
||||||
});
|
});
|
||||||
|
|
||||||
it('should list 0 multipart uploads when MaxUploads is 0', () => {
|
test('should list 0 multipart uploads when MaxUploads is 0', () => {
|
||||||
data.maxUploads = 0;
|
data.maxUploads = 0;
|
||||||
|
|
||||||
return s3.listMultipartUploadsAsync({
|
return s3.listMultipartUploadsAsync({
|
||||||
|
|
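These specs return promises instead of taking `done`; Jest, like Mocha, waits on a returned promise, so the `it` to `test` rename is the whole change. The same tests could equally be written with async/await. A sketch using this suite's own names:

    test('should list ongoing multipart uploads', async () => {
        const res = await s3.listMultipartUploadsAsync({ Bucket: bucket });
        checkValues(res, data);
    });

Either style works; the diff keeps the existing `.then` chains.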
|
@ -8,13 +8,12 @@ const bucket = 'bucketlistparts';
|
||||||
const object = 'toto';
|
const object = 'toto';
|
||||||
|
|
||||||
function checkError(err, statusCode, code) {
|
function checkError(err, statusCode, code) {
|
||||||
assert.strictEqual(err.statusCode, statusCode);
|
expect(err.statusCode).toBe(statusCode);
|
||||||
assert.strictEqual(err.code, code);
|
expect(err.code).toBe(code);
|
||||||
}
|
}
|
||||||
|
|
||||||
function checkNoError(err) {
|
function checkNoError(err) {
|
||||||
assert.equal(err, null,
|
expect(err).toEqual(null);
|
||||||
`Expected success, got error ${JSON.stringify(err)}`);
|
|
||||||
}
|
}
|
||||||
|
|
||||||
const body = Buffer.alloc(1024 * 1024 * 5, 'a');
|
const body = Buffer.alloc(1024 * 1024 * 5, 'a');
|
||||||
|
@ -30,11 +29,17 @@ const testsOrder = [
|
||||||
];
|
];
|
||||||
|
|
||||||
describe('More MPU tests', () => {
|
describe('More MPU tests', () => {
|
||||||
|
let testContext;
|
||||||
|
|
||||||
|
beforeEach(() => {
|
||||||
|
testContext = {};
|
||||||
|
});
|
||||||
|
|
||||||
withV4(sigCfg => {
|
withV4(sigCfg => {
|
||||||
let bucketUtil;
|
let bucketUtil;
|
||||||
let s3;
|
let s3;
|
||||||
|
|
||||||
beforeEach(function beforeEachF(done) {
|
beforeEach(done => {
|
||||||
bucketUtil = new BucketUtility('default', sigCfg);
|
bucketUtil = new BucketUtility('default', sigCfg);
|
||||||
s3 = bucketUtil.s3;
|
s3 = bucketUtil.s3;
|
||||||
async.waterfall([
|
async.waterfall([
|
||||||
|
@ -42,7 +47,7 @@ describe('More MPU tests', () => {
|
||||||
next => s3.createMultipartUpload({ Bucket: bucket,
|
next => s3.createMultipartUpload({ Bucket: bucket,
|
||||||
Key: object }, (err, data) => {
|
Key: object }, (err, data) => {
|
||||||
checkNoError(err);
|
checkNoError(err);
|
||||||
this.currentTest.UploadId = data.UploadId;
|
testContext.currentTest.UploadId = data.UploadId;
|
||||||
return next();
|
return next();
|
||||||
}),
|
}),
|
||||||
next => s3.uploadPart({
|
next => s3.uploadPart({
|
||||||
|
@ -50,9 +55,9 @@ describe('More MPU tests', () => {
|
||||||
Key: object,
|
Key: object,
|
||||||
PartNumber: 1000,
|
PartNumber: 1000,
|
||||||
Body: body,
|
Body: body,
|
||||||
UploadId: this.currentTest.UploadId }, (err, data) => {
|
UploadId: testContext.currentTest.UploadId }, (err, data) => {
|
||||||
checkNoError(err);
|
checkNoError(err);
|
||||||
this.currentTest.Etag = data.ETag;
|
testContext.currentTest.Etag = data.ETag;
|
||||||
return next();
|
return next();
|
||||||
}),
|
}),
|
||||||
next => s3.uploadPart({
|
next => s3.uploadPart({
|
||||||
|
@ -60,13 +65,13 @@ describe('More MPU tests', () => {
|
||||||
Key: object,
|
Key: object,
|
||||||
PartNumber: 3,
|
PartNumber: 3,
|
||||||
Body: body,
|
Body: body,
|
||||||
UploadId: this.currentTest.UploadId }, err => next(err)),
|
UploadId: testContext.currentTest.UploadId }, err => next(err)),
|
||||||
next => s3.uploadPart({
|
next => s3.uploadPart({
|
||||||
Bucket: bucket,
|
Bucket: bucket,
|
||||||
Key: object,
|
Key: object,
|
||||||
PartNumber: 8,
|
PartNumber: 8,
|
||||||
Body: body,
|
Body: body,
|
||||||
UploadId: this.currentTest.UploadId }, err => next(err)),
|
UploadId: testContext.currentTest.UploadId }, err => next(err)),
|
||||||
], done);
|
], done);
|
||||||
});
|
});
|
||||||
|
|
||||||
|
@ -78,8 +83,8 @@ describe('More MPU tests', () => {
|
||||||
], done);
|
], done);
|
||||||
});
|
});
|
||||||
testsOrder.forEach(testOrder => {
|
testsOrder.forEach(testOrder => {
|
||||||
it('should complete MPU by concatenating the parts in ' +
|
test('should complete MPU by concatenating the parts in ' +
|
||||||
`the following order: ${testOrder.values}`, function itF(done) {
|
`the following order: ${testOrder.values}`, done => {
|
||||||
async.waterfall([
|
async.waterfall([
|
||||||
next => s3.completeMultipartUpload({
|
next => s3.completeMultipartUpload({
|
||||||
Bucket: bucket,
|
Bucket: bucket,
|
||||||
|
@ -87,27 +92,27 @@ describe('More MPU tests', () => {
|
||||||
MultipartUpload: {
|
MultipartUpload: {
|
||||||
Parts: [
|
Parts: [
|
||||||
{
|
{
|
||||||
ETag: this.test.Etag,
|
ETag: testContext.test.Etag,
|
||||||
PartNumber: testOrder.values[0],
|
PartNumber: testOrder.values[0],
|
||||||
},
|
},
|
||||||
{
|
{
|
||||||
ETag: this.test.Etag,
|
ETag: testContext.test.Etag,
|
||||||
PartNumber: testOrder.values[1],
|
PartNumber: testOrder.values[1],
|
||||||
},
|
},
|
||||||
{
|
{
|
||||||
ETag: this.test.Etag,
|
ETag: testContext.test.Etag,
|
||||||
PartNumber: testOrder.values[2],
|
PartNumber: testOrder.values[2],
|
||||||
},
|
},
|
||||||
],
|
],
|
||||||
},
|
},
|
||||||
UploadId: this.test.UploadId }, next),
|
UploadId: testContext.test.UploadId }, next),
|
||||||
], err => {
|
], err => {
|
||||||
if (testOrder.err) {
|
if (testOrder.err) {
|
||||||
checkError(err, 400, 'InvalidPartOrder');
|
checkError(err, 400, 'InvalidPartOrder');
|
||||||
return s3.abortMultipartUpload({
|
return s3.abortMultipartUpload({
|
||||||
Bucket: bucket,
|
Bucket: bucket,
|
||||||
Key: object,
|
Key: object,
|
||||||
UploadId: this.test.UploadId,
|
UploadId: testContext.test.UploadId,
|
||||||
}, done);
|
}, done);
|
||||||
}
|
}
|
||||||
checkNoError(err);
|
checkNoError(err);
|
||||||
|
|
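Jest callbacks do not receive Mocha's dynamic `this`, so the per-test state that used to hang off `this.currentTest`/`this.test` moves to an explicit `testContext` object reset in a `beforeEach`. One thing to verify after this mechanical rewrite: in Mocha, `this.currentTest` (inside hooks) and `this.test` (inside the test body) resolve to the same test object, whereas `testContext.currentTest` and `testContext.test` are now two unrelated properties, and as written the hooks populate `currentTest` while the tests read `test`. A condensed sketch of the pattern with the two unified (the property name `ctx` is illustrative):

    describe('More MPU tests', () => {
        let testContext;

        beforeEach(() => {
            testContext = { ctx: {} };
        });

        beforeEach(done => {
            s3.createMultipartUpload({ Bucket: bucket, Key: object },
                (err, data) => {
                    checkNoError(err);
                    testContext.ctx.UploadId = data.UploadId; // was: this.currentTest.UploadId
                    done();
                });
        });

        test('completes the MPU', done => {
            expect(testContext.ctx.UploadId).toBeTruthy(); // was: this.test.UploadId
            done();
        });
    });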
|
@ -11,13 +11,12 @@ const bucketName = 'multi-object-delete-234-634';
|
||||||
const key = 'key';
|
const key = 'key';
|
||||||
|
|
||||||
function checkNoError(err) {
|
function checkNoError(err) {
|
||||||
assert.equal(err, null,
|
expect(err).toEqual(null);
|
||||||
`Expected success, got error ${JSON.stringify(err)}`);
|
|
||||||
}
|
}
|
||||||
|
|
||||||
function checkError(err, code) {
|
function checkError(err, code) {
|
||||||
assert.notEqual(err, null, 'Expected failure but got success');
|
expect(err).not.toEqual(null);
|
||||||
assert.strictEqual(err.code, code);
|
expect(err.code).toBe(code);
|
||||||
}
|
}
|
||||||
|
|
||||||
function sortList(list) {
|
function sortList(list) {
|
||||||
|
@ -42,7 +41,7 @@ function createObjectsList(size) {
|
||||||
return objects;
|
return objects;
|
||||||
}
|
}
|
||||||
|
|
||||||
describe('Multi-Object Delete Success', function success() {
|
describe('Multi-Object Delete Success', () => {
|
||||||
this.timeout(360000);
|
this.timeout(360000);
|
||||||
let bucketUtil;
|
let bucketUtil;
|
||||||
let s3;
|
let s3;
|
||||||
|
@ -85,7 +84,7 @@ describe('Multi-Object Delete Success', function success() {
|
||||||
|
|
||||||
afterEach(() => s3.deleteBucketAsync({ Bucket: bucketName }));
|
afterEach(() => s3.deleteBucketAsync({ Bucket: bucketName }));
|
||||||
|
|
||||||
it('should batch delete 1000 objects', () => {
|
test('should batch delete 1000 objects', () => {
|
||||||
const objects = createObjectsList(1000);
|
const objects = createObjectsList(1000);
|
||||||
return s3.deleteObjectsAsync({
|
return s3.deleteObjectsAsync({
|
||||||
Bucket: bucketName,
|
Bucket: bucketName,
|
||||||
|
@ -94,16 +93,16 @@ describe('Multi-Object Delete Success', function success() {
|
||||||
Quiet: false,
|
Quiet: false,
|
||||||
},
|
},
|
||||||
}).then(res => {
|
}).then(res => {
|
||||||
assert.strictEqual(res.Deleted.length, 1000);
|
expect(res.Deleted.length).toBe(1000);
|
||||||
// order of returned objects not sorted
|
// order of returned objects not sorted
|
||||||
assert.deepStrictEqual(sortList(res.Deleted), sortList(objects));
|
assert.deepStrictEqual(sortList(res.Deleted), sortList(objects));
|
||||||
assert.strictEqual(res.Errors.length, 0);
|
expect(res.Errors.length).toBe(0);
|
||||||
}).catch(err => {
|
}).catch(err => {
|
||||||
checkNoError(err);
|
checkNoError(err);
|
||||||
});
|
});
|
||||||
});
|
});
|
||||||
|
|
||||||
it('should batch delete 1000 objects quietly', () => {
|
test('should batch delete 1000 objects quietly', () => {
|
||||||
const objects = createObjectsList(1000);
|
const objects = createObjectsList(1000);
|
||||||
return s3.deleteObjectsAsync({
|
return s3.deleteObjectsAsync({
|
||||||
Bucket: bucketName,
|
Bucket: bucketName,
|
||||||
|
@ -112,8 +111,8 @@ describe('Multi-Object Delete Success', function success() {
|
||||||
Quiet: true,
|
Quiet: true,
|
||||||
},
|
},
|
||||||
}).then(res => {
|
}).then(res => {
|
||||||
assert.strictEqual(res.Deleted.length, 0);
|
expect(res.Deleted.length).toBe(0);
|
||||||
assert.strictEqual(res.Errors.length, 0);
|
expect(res.Errors.length).toBe(0);
|
||||||
}).catch(err => {
|
}).catch(err => {
|
||||||
checkNoError(err);
|
checkNoError(err);
|
||||||
});
|
});
|
||||||
|
@ -137,7 +136,8 @@ describe('Multi-Object Delete Error Responses', () => {
|
||||||
|
|
||||||
afterEach(() => s3.deleteBucketAsync({ Bucket: bucketName }));
|
afterEach(() => s3.deleteBucketAsync({ Bucket: bucketName }));
|
||||||
|
|
||||||
it('should return error if request deletion of more than 1000 objects',
|
test(
|
||||||
|
'should return error if request deletion of more than 1000 objects',
|
||||||
() => {
|
() => {
|
||||||
const objects = createObjectsList(1001);
|
const objects = createObjectsList(1001);
|
||||||
return s3.deleteObjectsAsync({
|
return s3.deleteObjectsAsync({
|
||||||
|
@ -148,10 +148,10 @@ describe('Multi-Object Delete Error Responses', () => {
|
||||||
}).catch(err => {
|
}).catch(err => {
|
||||||
checkError(err, 'MalformedXML');
|
checkError(err, 'MalformedXML');
|
||||||
});
|
});
|
||||||
});
|
}
|
||||||
|
);
|
||||||
|
|
||||||
it('should return error if request deletion of 0 objects',
|
test('should return error if request deletion of 0 objects', () => {
|
||||||
() => {
|
|
||||||
const objects = createObjectsList(0);
|
const objects = createObjectsList(0);
|
||||||
return s3.deleteObjectsAsync({
|
return s3.deleteObjectsAsync({
|
||||||
Bucket: bucketName,
|
Bucket: bucketName,
|
||||||
|
@ -163,8 +163,7 @@ describe('Multi-Object Delete Error Responses', () => {
|
||||||
});
|
});
|
||||||
});
|
});
|
||||||
|
|
||||||
it('should return no error if try to delete non-existent objects',
|
test('should return no error if try to delete non-existent objects', () => {
|
||||||
() => {
|
|
||||||
const objects = createObjectsList(1000);
|
const objects = createObjectsList(1000);
|
||||||
return s3.deleteObjectsAsync({
|
return s3.deleteObjectsAsync({
|
||||||
Bucket: bucketName,
|
Bucket: bucketName,
|
||||||
|
@ -172,14 +171,14 @@ describe('Multi-Object Delete Error Responses', () => {
|
||||||
Objects: objects,
|
Objects: objects,
|
||||||
},
|
},
|
||||||
}).then(res => {
|
}).then(res => {
|
||||||
assert.strictEqual(res.Deleted.length, 1000);
|
expect(res.Deleted.length).toBe(1000);
|
||||||
assert.strictEqual(res.Errors.length, 0);
|
expect(res.Errors.length).toBe(0);
|
||||||
}).catch(err => {
|
}).catch(err => {
|
||||||
checkNoError(err);
|
checkNoError(err);
|
||||||
});
|
});
|
||||||
});
|
});
|
||||||
|
|
||||||
it('should return error if no such bucket', () => {
|
test('should return error if no such bucket', () => {
|
||||||
const objects = createObjectsList(1);
|
const objects = createObjectsList(1);
|
||||||
return s3.deleteObjectsAsync({
|
return s3.deleteObjectsAsync({
|
||||||
Bucket: 'nosuchbucket2323292093',
|
Bucket: 'nosuchbucket2323292093',
|
||||||
|
@ -193,12 +192,12 @@ describe('Multi-Object Delete Error Responses', () => {
|
||||||
});
|
});
|
||||||
});
|
});
|
||||||
|
|
||||||
describe('Multi-Object Delete Access', function access() {
|
describe('Multi-Object Delete Access', () => {
|
||||||
this.timeout(360000);
|
this.timeout(360000);
|
||||||
let bucketUtil;
|
let bucketUtil;
|
||||||
let s3;
|
let s3;
|
||||||
|
|
||||||
before(() => {
|
beforeAll(() => {
|
||||||
const createObjects = [];
|
const createObjects = [];
|
||||||
bucketUtil = new BucketUtility('default', {
|
bucketUtil = new BucketUtility('default', {
|
||||||
signatureVersion: 'v4',
|
signatureVersion: 'v4',
|
||||||
|
@ -225,9 +224,9 @@ describe('Multi-Object Delete Access', function access() {
|
||||||
});
|
});
|
||||||
});
|
});
|
||||||
|
|
||||||
after(() => s3.deleteBucketAsync({ Bucket: bucketName }));
|
afterAll(() => s3.deleteBucketAsync({ Bucket: bucketName }));
|
||||||
|
|
||||||
it('should return access denied error for each object where no acl ' +
|
test('should return access denied error for each object where no acl ' +
|
||||||
'permission', () => {
|
'permission', () => {
|
||||||
const objects = createObjectsList(500);
|
const objects = createObjectsList(500);
|
||||||
const errorList = createObjectsList(500);
|
const errorList = createObjectsList(500);
|
||||||
|
@ -243,16 +242,16 @@ describe('Multi-Object Delete Access', function access() {
|
||||||
Quiet: false,
|
Quiet: false,
|
||||||
},
|
},
|
||||||
}).then(res => {
|
}).then(res => {
|
||||||
assert.strictEqual(res.Deleted.length, 0);
|
expect(res.Deleted.length).toBe(0);
|
||||||
assert.deepStrictEqual(sortList(res.Errors), sortList(errorList));
|
assert.deepStrictEqual(sortList(res.Errors), sortList(errorList));
|
||||||
assert.strictEqual(res.Errors.length, 500);
|
expect(res.Errors.length).toBe(500);
|
||||||
}).catch(err => {
|
}).catch(err => {
|
||||||
checkNoError(err);
|
checkNoError(err);
|
||||||
});
|
});
|
||||||
});
|
});
|
||||||
|
|
||||||
|
|
||||||
it('should batch delete objects where requester has permission', () => {
|
test('should batch delete objects where requester has permission', () => {
|
||||||
const objects = createObjectsList(500);
|
const objects = createObjectsList(500);
|
||||||
return s3.deleteObjectsAsync({
|
return s3.deleteObjectsAsync({
|
||||||
Bucket: bucketName,
|
Bucket: bucketName,
|
||||||
|
@ -261,8 +260,8 @@ describe('Multi-Object Delete Access', function access() {
|
||||||
Quiet: false,
|
Quiet: false,
|
||||||
},
|
},
|
||||||
}).then(res => {
|
}).then(res => {
|
||||||
assert.strictEqual(res.Deleted.length, 500);
|
expect(res.Deleted.length).toBe(500);
|
||||||
assert.strictEqual(res.Errors.length, 0);
|
expect(res.Errors.length).toBe(0);
|
||||||
}).catch(err => {
|
}).catch(err => {
|
||||||
checkNoError(err);
|
checkNoError(err);
|
||||||
});
|
});
|
||||||
|
|
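Both `describe` callbacks in this file lose their named-function form, yet the `this.timeout(360000)` lines survive as unchanged context. `this.timeout` is Mocha API, and `this` is not bound inside an arrow function, so those lines need a follow-up; the Jest counterpart is `jest.setTimeout`. A sketch:

    describe('Multi-Object Delete Success', () => {
        // was: this.timeout(360000); Mocha-only, and unavailable in an arrow
        jest.setTimeout(360000); // note: applies to the whole test file, not just this describe

        test('should batch delete 1000 objects', () => {
            // ...
        });
    });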
|
@ -44,13 +44,12 @@ const otherAccountBucketUtility = new BucketUtility('lisa', {});
|
||||||
const otherAccountS3 = otherAccountBucketUtility.s3;
|
const otherAccountS3 = otherAccountBucketUtility.s3;
|
||||||
|
|
||||||
function checkNoError(err) {
|
function checkNoError(err) {
|
||||||
assert.equal(err, null,
|
expect(err).toEqual(null);
|
||||||
`Expected success, got error ${JSON.stringify(err)}`);
|
|
||||||
}
|
}
|
||||||
|
|
||||||
function checkError(err, code) {
|
function checkError(err, code) {
|
||||||
assert.notEqual(err, null, 'Expected failure but got success');
|
expect(err).not.toEqual(null);
|
||||||
assert.strictEqual(err.code, code);
|
expect(err.code).toBe(code);
|
||||||
}
|
}
|
||||||
|
|
||||||
function dateFromNow(diff) {
|
function dateFromNow(diff) {
|
||||||
|
@ -72,7 +71,7 @@ describe('Object Copy', () => {
|
||||||
let etagTrim;
|
let etagTrim;
|
||||||
let lastModified;
|
let lastModified;
|
||||||
|
|
||||||
before(() => {
|
beforeAll(() => {
|
||||||
bucketUtil = new BucketUtility('default', sigCfg);
|
bucketUtil = new BucketUtility('default', sigCfg);
|
||||||
s3 = bucketUtil.s3;
|
s3 = bucketUtil.s3;
|
||||||
return bucketUtil.empty(sourceBucketName)
|
return bucketUtil.empty(sourceBucketName)
|
||||||
|
@ -119,10 +118,9 @@ describe('Object Copy', () => {
|
||||||
}));
|
}));
|
||||||
|
|
||||||
afterEach(() => bucketUtil.empty(sourceBucketName)
|
afterEach(() => bucketUtil.empty(sourceBucketName)
|
||||||
.then(() => bucketUtil.empty(destBucketName))
|
.then(() => bucketUtil.empty(destBucketName)));
|
||||||
);
|
|
||||||
|
|
||||||
after(() => bucketUtil.deleteMany([sourceBucketName, destBucketName]));
|
afterAll(() => bucketUtil.deleteMany([sourceBucketName, destBucketName]));
|
||||||

 function requestCopy(fields, cb) {
 s3.copyObject(Object.assign({
@@ -135,18 +133,16 @@ describe('Object Copy', () => {
 function successCopyCheck(error, response, copyVersionMetadata,
 destBucketName, destObjName, done) {
 checkNoError(error);
-assert.strictEqual(response.ETag, etag);
+expect(response.ETag).toBe(etag);
 const copyLastModified = new Date(response.LastModified)
 .toUTCString();
 s3.getObject({ Bucket: destBucketName,
 Key: destObjName }, (err, res) => {
 checkNoError(err);
-assert.strictEqual(res.Body.toString(),
-content);
+expect(res.Body.toString()).toBe(content);
 assert.deepStrictEqual(res.Metadata,
 copyVersionMetadata);
-assert.strictEqual(res.LastModified,
-copyLastModified);
+expect(res.LastModified).toBe(copyLastModified);
 done();
 });
 }
@@ -155,8 +151,8 @@ describe('Object Copy', () => {
 s3.getObjectTagging({ Bucket: destBucketName, Key: destObjName },
 (err, data) => {
 checkNoError(err);
-assert.strictEqual(data.TagSet[0].Key, key);
-assert.strictEqual(data.TagSet[0].Value, value);
+expect(data.TagSet[0].Key).toBe(key);
+expect(data.TagSet[0].Value).toBe(value);
 cb();
 });
 }
@@ -165,12 +161,12 @@ describe('Object Copy', () => {
 s3.getObjectTagging({ Bucket: destBucketName, Key: destObjName },
 (err, data) => {
 checkNoError(err);
-assert.strictEqual(data.TagSet.length, 0);
+expect(data.TagSet.length).toBe(0);
 cb();
 });
 }

-it('should copy an object from a source bucket to a different ' +
+test('should copy an object from a source bucket to a different ' +
 'destination bucket and copy the metadata if no metadata directve' +
 'header provided', done => {
 s3.copyObject({ Bucket: destBucketName, Key: destObjName,
@@ -181,7 +177,7 @@ describe('Object Copy', () => {
 );
 });

-it('should copy an object from a source bucket to a different ' +
+test('should copy an object from a source bucket to a different ' +
 'destination bucket and copy the tag set if no tagging directive' +
 'header provided', done => {
 s3.copyObject({ Bucket: destBucketName, Key: destObjName,
@@ -192,7 +188,7 @@ describe('Object Copy', () => {
 });
 });

-it('should return 400 InvalidArgument if invalid tagging ' +
+test('should return 400 InvalidArgument if invalid tagging ' +
 'directive', done => {
 s3.copyObject({ Bucket: destBucketName, Key: destObjName,
 CopySource: `${sourceBucketName}/${sourceObjName}`,
@@ -203,7 +199,7 @@ describe('Object Copy', () => {
 });
 });

-it('should copy an object from a source bucket to a different ' +
+test('should copy an object from a source bucket to a different ' +
 'destination bucket and copy the tag set if COPY tagging ' +
 'directive header provided', done => {
 s3.copyObject({ Bucket: destBucketName, Key: destObjName,
@@ -215,7 +211,7 @@ describe('Object Copy', () => {
 });
 });

-it('should copy an object and tag set if COPY ' +
+test('should copy an object and tag set if COPY ' +
 'included as tag directive header (and ignore any new ' +
 'tag set sent with copy request)', done => {
 s3.copyObject({ Bucket: destBucketName, Key: destObjName,
@@ -233,9 +229,8 @@ describe('Object Copy', () => {
 });
 });

-it('should copy an object from a source to the same destination ' +
-'updating tag if REPLACE tagging directive header provided',
-done => {
+test('should copy an object from a source to the same destination ' +
+'updating tag if REPLACE tagging directive header provided', done => {
 s3.copyObject({ Bucket: destBucketName, Key: destObjName,
 CopySource: `${sourceBucketName}/${sourceObjName}`,
 TaggingDirective: 'REPLACE', Tagging: newTagging },
@@ -245,7 +240,7 @@ describe('Object Copy', () => {
 });
 });

-it('should copy an object from a source to the same destination ' +
+test('should copy an object from a source to the same destination ' +
 'return no tag if REPLACE tagging directive header provided but ' +
 '"x-amz-tagging" header is not specified', done => {
 s3.copyObject({ Bucket: destBucketName, Key: destObjName,
@@ -257,7 +252,7 @@ describe('Object Copy', () => {
 });
 });

-it('should copy an object from a source to the same destination ' +
+test('should copy an object from a source to the same destination ' +
 'return no tag if COPY tagging directive header but provided from ' +
 'an empty object', done => {
 s3.putObject({ Bucket: sourceBucketName, Key: 'emptyobject' },
@@ -273,9 +268,8 @@ describe('Object Copy', () => {
 });
 });

-it('should copy an object from a source to the same destination ' +
-'updating tag if REPLACE tagging directive header provided',
-done => {
+test('should copy an object from a source to the same destination ' +
+'updating tag if REPLACE tagging directive header provided', done => {
 s3.copyObject({ Bucket: destBucketName, Key: destObjName,
 CopySource: `${sourceBucketName}/${sourceObjName}`,
 TaggingDirective: 'REPLACE', Tagging: newTagging },
@@ -287,7 +281,7 @@ describe('Object Copy', () => {

 describe('Copy object updating tag set', () => {
 taggingTests.forEach(taggingTest => {
-it(taggingTest.it, done => {
+test(taggingTest.it, done => {
 const key = encodeURIComponent(taggingTest.tag.key);
 const value = encodeURIComponent(taggingTest.tag.value);
 const tagging = `${key}=${value}`;
@@ -299,8 +293,7 @@ describe('Object Copy', () => {
 checkError(err, taggingTest.error);
 return done();
 }
-assert.equal(err, null, 'Expected success, ' +
-`got error ${JSON.stringify(err)}`);
+expect(err).toEqual(null);
 return checkSuccessTagging(taggingTest.tag.key,
 taggingTest.tag.value, done);
 });
@@ -308,10 +301,9 @@ describe('Object Copy', () => {
 });
 });

-it('should also copy additional headers (CacheControl, ' +
+test('should also copy additional headers (CacheControl, ' +
 'ContentDisposition, ContentEncoding, Expires) when copying an ' +
-'object from a source bucket to a different destination bucket',
-done => {
+'object from a source bucket to a different destination bucket', done => {
 s3.copyObject({ Bucket: destBucketName, Key: destObjName,
 CopySource: `${sourceBucketName}/${sourceObjName}` },
 err => {
@@ -321,25 +313,19 @@ describe('Object Copy', () => {
 if (err) {
 done(err);
 }
-assert.strictEqual(res.CacheControl,
-originalCacheControl);
-assert.strictEqual(res.ContentDisposition,
-originalContentDisposition);
+expect(res.CacheControl).toBe(originalCacheControl);
+expect(res.ContentDisposition).toBe(originalContentDisposition);
 // Should remove V4 streaming value 'aws-chunked'
 // to be compatible with AWS behavior
-assert.strictEqual(res.ContentEncoding,
-'base64,'
-);
-assert.strictEqual(res.Expires,
-originalExpires.toGMTString());
+expect(res.ContentEncoding).toBe('base64,');
+expect(res.Expires).toBe(originalExpires.toGMTString());
 done();
 });
 });
 });

-it('should copy an object from a source bucket to a different ' +
-'key in the same bucket',
-done => {
+test('should copy an object from a source bucket to a different ' +
+'key in the same bucket', done => {
 s3.copyObject({ Bucket: sourceBucketName, Key: destObjName,
 CopySource: `${sourceBucketName}/${sourceObjName}` },
 (err, res) =>
@@ -348,9 +334,8 @@ describe('Object Copy', () => {
 );
 });

-it('should not return error if copying object w/ > ' +
-'2KB user-defined md and COPY directive',
-done => {
+test('should not return error if copying object w/ > ' +
+'2KB user-defined md and COPY directive', done => {
 const metadata = genMaxSizeMetaHeaders();
 const params = {
 Bucket: destBucketName,
@@ -360,19 +345,18 @@ describe('Object Copy', () => {
 Metadata: metadata,
 };
 s3.copyObject(params, err => {
-assert.strictEqual(err, null, `Unexpected err: ${err}`);
+expect(err).toBe(null);
 // add one more byte to be over the limit
 metadata.header0 = `${metadata.header0}${'0'}`;
 s3.copyObject(params, err => {
-assert.strictEqual(err, null, `Unexpected err: ${err}`);
+expect(err).toBe(null);
 done();
 });
 });
 });

-it('should return error if copying object w/ > 2KB ' +
-'user-defined md and REPLACE directive',
-done => {
+test('should return error if copying object w/ > 2KB ' +
+'user-defined md and REPLACE directive', done => {
 const metadata = genMaxSizeMetaHeaders();
 const params = {
 Bucket: destBucketName,
@@ -382,19 +366,19 @@ describe('Object Copy', () => {
 Metadata: metadata,
 };
 s3.copyObject(params, err => {
-assert.strictEqual(err, null, `Unexpected err: ${err}`);
+expect(err).toBe(null);
 // add one more byte to be over the limit
 metadata.header0 = `${metadata.header0}${'0'}`;
 s3.copyObject(params, err => {
-assert(err, 'Expected err but did not find one');
-assert.strictEqual(err.code, 'MetadataTooLarge');
-assert.strictEqual(err.statusCode, 400);
+expect(err).toBeTruthy();
+expect(err.code).toBe('MetadataTooLarge');
+expect(err.statusCode).toBe(400);
 done();
 });
 });
 });
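Where the original assertions carried custom messages ('Unexpected err: ...', 'Expected err but did not find one'), the Jest versions rely on the matcher's default report. Jest's more specific matchers would express the same checks; an equivalent alternative, not what this diff uses:

    const err = null;          // illustrative value
    expect(err).toBeNull();    // same check as expect(err).toBe(null)
    expect({}).toBeTruthy();   // passes for any truthy value, as assert(value) did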

-it('should copy an object from a source to the same destination ' +
+test('should copy an object from a source to the same destination ' +
 '(update metadata)', done => {
 s3.copyObject({ Bucket: sourceBucketName, Key: sourceObjName,
 CopySource: `${sourceBucketName}/${sourceObjName}`,
@@ -406,7 +390,7 @@ describe('Object Copy', () => {
 );
 });

-it('should copy an object and replace the metadata if replace ' +
+test('should copy an object and replace the metadata if replace ' +
 'included as metadata directive header', done => {
 s3.copyObject({ Bucket: destBucketName, Key: destObjName,
 CopySource: `${sourceBucketName}/${sourceObjName}`,
@@ -419,7 +403,7 @@ describe('Object Copy', () => {
 );
 });

-it('should copy an object and replace ContentType if replace ' +
+test('should copy an object and replace ContentType if replace ' +
 'included as a metadata directive header, and new ContentType is ' +
 'provided', done => {
 s3.copyObject({ Bucket: destBucketName, Key: destObjName,
@@ -432,13 +416,13 @@ describe('Object Copy', () => {
 if (err) {
 return done(err);
 }
-assert.strictEqual(res.ContentType, 'image');
+expect(res.ContentType).toBe('image');
 return done();
 });
 });
 });

-it('should copy an object and keep ContentType if replace ' +
+test('should copy an object and keep ContentType if replace ' +
 'included as a metadata directive header, but no new ContentType ' +
 'is provided', done => {
 s3.copyObject({ Bucket: destBucketName, Key: destObjName,
@@ -450,14 +434,13 @@ describe('Object Copy', () => {
 if (err) {
 return done(err);
 }
-assert.strictEqual(res.ContentType,
-'application/octet-stream');
+expect(res.ContentType).toBe('application/octet-stream');
 return done();
 });
 });
 });

-it('should also replace additional headers if replace ' +
+test('should also replace additional headers if replace ' +
 'included as metadata directive header and new headers are ' +
 'specified', done => {
 s3.copyObject({ Bucket: destBucketName, Key: destObjName,
@@ -474,19 +457,18 @@ describe('Object Copy', () => {
 if (err) {
 done(err);
 }
-assert.strictEqual(res.CacheControl, newCacheControl);
-assert.strictEqual(res.ContentDisposition,
-newContentDisposition);
+expect(res.CacheControl).toBe(newCacheControl);
+expect(res.ContentDisposition).toBe(newContentDisposition);
 // Should remove V4 streaming value 'aws-chunked'
 // to be compatible with AWS behavior
-assert.strictEqual(res.ContentEncoding, 'gzip,');
-assert.strictEqual(res.Expires, newExpires.toGMTString());
+expect(res.ContentEncoding).toBe('gzip,');
+expect(res.Expires).toBe(newExpires.toGMTString());
 done();
 });
 });
 });

-it('should copy an object and the metadata if copy ' +
+test('should copy an object and the metadata if copy ' +
 'included as metadata directive header (and ignore any new ' +
 'metadata sent with copy request)', done => {
 s3.copyObject({ Bucket: destBucketName, Key: destObjName,
@@ -504,7 +486,7 @@ describe('Object Copy', () => {
 });
 });

-it('should copy an object and its additional headers if copy ' +
+test('should copy an object and its additional headers if copy ' +
 'included as metadata directive header (and ignore any new ' +
 'headers sent with copy request)', done => {
 s3.copyObject({ Bucket: destBucketName, Key: destObjName,
@@ -522,20 +504,16 @@ describe('Object Copy', () => {
 if (err) {
 done(err);
 }
-assert.strictEqual(res.CacheControl,
-originalCacheControl);
-assert.strictEqual(res.ContentDisposition,
-originalContentDisposition);
-assert.strictEqual(res.ContentEncoding,
-'base64,');
-assert.strictEqual(res.Expires,
-originalExpires.toGMTString());
+expect(res.CacheControl).toBe(originalCacheControl);
+expect(res.ContentDisposition).toBe(originalContentDisposition);
+expect(res.ContentEncoding).toBe('base64,');
+expect(res.Expires).toBe(originalExpires.toGMTString());
 done();
 });
 });
 });

-it('should copy a 0 byte object to different destination', done => {
+test('should copy a 0 byte object to different destination', done => {
 const emptyFileETag = '"d41d8cd98f00b204e9800998ecf8427e"';
 s3.putObject({ Bucket: sourceBucketName, Key: sourceObjName,
 Body: '', Metadata: originalMetadata }, () => {
@@ -544,20 +522,20 @@ describe('Object Copy', () => {
 },
 (err, res) => {
 checkNoError(err);
-assert.strictEqual(res.ETag, emptyFileETag);
+expect(res.ETag).toBe(emptyFileETag);
 s3.getObject({ Bucket: destBucketName,
 Key: destObjName }, (err, res) => {
 checkNoError(err);
 assert.deepStrictEqual(res.Metadata,
 originalMetadata);
-assert.strictEqual(res.ETag, emptyFileETag);
+expect(res.ETag).toBe(emptyFileETag);
 done();
 });
 });
 });
 });

-it('should copy a 0 byte object to same destination', done => {
+test('should copy a 0 byte object to same destination', done => {
 const emptyFileETag = '"d41d8cd98f00b204e9800998ecf8427e"';
 s3.putObject({ Bucket: sourceBucketName, Key: sourceObjName,
 Body: '' }, () => {
@@ -567,21 +545,21 @@ describe('Object Copy', () => {
 },
 (err, res) => {
 checkNoError(err);
-assert.strictEqual(res.ETag, emptyFileETag);
+expect(res.ETag).toBe(emptyFileETag);
 s3.getObject({ Bucket: sourceBucketName,
 Key: sourceObjName }, (err, res) => {
 assert.deepStrictEqual(res.Metadata,
 {});
 assert.deepStrictEqual(res.StorageClass,
 'REDUCED_REDUNDANCY');
-assert.strictEqual(res.ETag, emptyFileETag);
+expect(res.ETag).toBe(emptyFileETag);
 done();
 });
 });
 });
 });

-it('should copy an object to a different destination and change ' +
+test('should copy an object to a different destination and change ' +
 'the storage class if storage class header provided', done => {
 s3.copyObject({ Bucket: destBucketName, Key: destObjName,
 CopySource: `${sourceBucketName}/${sourceObjName}`,
@@ -591,14 +569,13 @@ describe('Object Copy', () => {
 checkNoError(err);
 s3.getObject({ Bucket: destBucketName,
 Key: destObjName }, (err, res) => {
-assert.strictEqual(res.StorageClass,
-'REDUCED_REDUNDANCY');
+expect(res.StorageClass).toBe('REDUCED_REDUNDANCY');
 done();
 });
 });
 });

-it('should copy an object to the same destination and change the ' +
+test('should copy an object to the same destination and change the ' +
 'storage class if the storage class header provided', done => {
 s3.copyObject({ Bucket: sourceBucketName, Key: sourceObjName,
 CopySource: `${sourceBucketName}/${sourceObjName}`,
@@ -609,14 +586,13 @@ describe('Object Copy', () => {
 s3.getObject({ Bucket: sourceBucketName,
 Key: sourceObjName }, (err, res) => {
 checkNoError(err);
-assert.strictEqual(res.StorageClass,
-'REDUCED_REDUNDANCY');
+expect(res.StorageClass).toBe('REDUCED_REDUNDANCY');
 done();
 });
 });
 });

-it('should copy an object to a new bucket and overwrite an already ' +
+test('should copy an object to a new bucket and overwrite an already ' +
 'existing object in the destination bucket', done => {
 s3.putObject({ Bucket: destBucketName, Key: destObjName,
 Body: 'overwrite me', Metadata: originalMetadata }, () => {
@@ -627,13 +603,13 @@ describe('Object Copy', () => {
 },
 (err, res) => {
 checkNoError(err);
-assert.strictEqual(res.ETag, etag);
+expect(res.ETag).toBe(etag);
 s3.getObject({ Bucket: destBucketName,
 Key: destObjName }, (err, res) => {
 assert.deepStrictEqual(res.Metadata,
 newMetadata);
-assert.strictEqual(res.ETag, etag);
-assert.strictEqual(res.Body.toString(), content);
+expect(res.ETag).toBe(etag);
+expect(res.Body.toString()).toBe(content);
 done();
 });
 });
@@ -641,7 +617,7 @@ describe('Object Copy', () => {
 });

 // skipping test as object level encryption is not implemented yet
-it.skip('should copy an object and change the server side encryption' +
+test.skip('should copy an object and change the server side encryption' +
 'option if server side encryption header provided', done => {
 s3.copyObject({ Bucket: destBucketName, Key: destObjName,
 CopySource: `${sourceBucketName}/${sourceObjName}`,
@@ -651,36 +627,35 @@ describe('Object Copy', () => {
 checkNoError(err);
 s3.getObject({ Bucket: destBucketName,
 Key: destObjName }, (err, res) => {
-assert.strictEqual(res.ServerSideEncryption,
-'AES256');
+expect(res.ServerSideEncryption).toBe('AES256');
 done();
 });
 });
 });

-it('should return Not Implemented error for obj. encryption using ' +
+test('should return Not Implemented error for obj. encryption using ' +
 'AWS-managed encryption keys', done => {
 const params = { Bucket: destBucketName, Key: 'key',
 CopySource: `${sourceBucketName}/${sourceObjName}`,
 ServerSideEncryption: 'AES256' };
 s3.copyObject(params, err => {
-assert.strictEqual(err.code, 'NotImplemented');
+expect(err.code).toBe('NotImplemented');
 done();
 });
 });

-it('should return Not Implemented error for obj. encryption using ' +
+test('should return Not Implemented error for obj. encryption using ' +
 'customer-provided encryption keys', done => {
 const params = { Bucket: destBucketName, Key: 'key',
 CopySource: `${sourceBucketName}/${sourceObjName}`,
 SSECustomerAlgorithm: 'AES256' };
 s3.copyObject(params, err => {
-assert.strictEqual(err.code, 'NotImplemented');
+expect(err.code).toBe('NotImplemented');
 done();
 });
 });

-it('should copy an object and set the acl on the new object', done => {
+test('should copy an object and set the acl on the new object', done => {
 s3.copyObject({ Bucket: destBucketName, Key: destObjName,
 CopySource: `${sourceBucketName}/${sourceObjName}`,
 ACL: 'authenticated-read',
@@ -693,20 +668,17 @@ describe('Object Copy', () => {
 // grants:
 // (1) FULL_CONTROL to the object owner
 // (2) READ to the authenticated-read
-assert.strictEqual(res.Grants.length, 2);
-assert.strictEqual(res.Grants[0].Permission,
-'FULL_CONTROL');
-assert.strictEqual(res.Grants[1].Permission,
-'READ');
-assert.strictEqual(res.Grants[1].Grantee.URI,
-'http://acs.amazonaws.com/groups/' +
+expect(res.Grants.length).toBe(2);
+expect(res.Grants[0].Permission).toBe('FULL_CONTROL');
+expect(res.Grants[1].Permission).toBe('READ');
+expect(res.Grants[1].Grantee.URI).toBe('http://acs.amazonaws.com/groups/' +
 'global/AuthenticatedUsers');
 done();
 });
 });
 });

-it('should copy an object and default the acl on the new object ' +
+test('should copy an object and default the acl on the new object ' +
 'to private even if the copied object had a ' +
 'different acl', done => {
 s3.putObjectAcl({ Bucket: sourceBucketName, Key: sourceObjName,
@@ -719,16 +691,15 @@ describe('Object Copy', () => {
 Key: destObjName }, (err, res) => {
 // With private ACL, there is only one grant
 // of FULL_CONTROL to the object owner
-assert.strictEqual(res.Grants.length, 1);
-assert.strictEqual(res.Grants[0].Permission,
-'FULL_CONTROL');
+expect(res.Grants.length).toBe(1);
+expect(res.Grants[0].Permission).toBe('FULL_CONTROL');
 done();
 });
 });
 });
 });

-it('should return an error if attempt to copy with same source as' +
+test('should return an error if attempt to copy with same source as' +
 'destination and do not change any metadata', done => {
 s3.copyObject({ Bucket: sourceBucketName, Key: sourceObjName,
 CopySource: `${sourceBucketName}/${sourceObjName}`,
@@ -739,7 +710,8 @@ describe('Object Copy', () => {
 });
 });

-it('should return an error if attempt to copy from nonexistent bucket',
+test(
+'should return an error if attempt to copy from nonexistent bucket',
 done => {
 s3.copyObject({ Bucket: destBucketName, Key: destObjName,
 CopySource: `nobucket453234/${sourceObjName}`,
@@ -748,10 +720,10 @@ describe('Object Copy', () => {
 checkError(err, 'NoSuchBucket');
 done();
 });
-});
+}
+);
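Where the renamed call no longer fits the line-length limit, the title and callback are re-wrapped onto separate lines, as in the hunk above. The rename itself is cosmetic: Jest keeps it as an alias of test, so the two forms are interchangeable (titles below are illustrative):

    test('copies the object', done => done());
    it('copies the object', done => done());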

-it('should return an error if use invalid redirect location',
-done => {
+test('should return an error if use invalid redirect location', done => {
 s3.copyObject({ Bucket: destBucketName, Key: destObjName,
 CopySource: `${sourceBucketName}/${sourceObjName}`,
 WebsiteRedirectLocation: 'google.com',
@@ -763,7 +735,8 @@ describe('Object Copy', () => {
 });


-it('should return an error if attempt to copy to nonexistent bucket',
+test(
+'should return an error if attempt to copy to nonexistent bucket',
 done => {
 s3.copyObject({ Bucket: 'nobucket453234', Key: destObjName,
 CopySource: `${sourceBucketName}/${sourceObjName}`,
@@ -772,10 +745,10 @@ describe('Object Copy', () => {
 checkError(err, 'NoSuchBucket');
 done();
 });
-});
+}
+);

-it('should return an error if attempt to copy nonexistent object',
-done => {
+test('should return an error if attempt to copy nonexistent object', done => {
 s3.copyObject({ Bucket: destBucketName, Key: destObjName,
 CopySource: `${sourceBucketName}/nokey`,
 },
@@ -785,7 +758,8 @@ describe('Object Copy', () => {
 });
 });

-it('should return an error if send invalid metadata directive header',
+test(
+'should return an error if send invalid metadata directive header',
 done => {
 s3.copyObject({ Bucket: destBucketName, Key: destObjName,
 CopySource: `${sourceBucketName}/${sourceObjName}`,
@@ -795,21 +769,20 @@ describe('Object Copy', () => {
 checkError(err, 'InvalidArgument');
 done();
 });
-});
+}
+);

 describe('copying by another account', () => {
 const otherAccountBucket = 'otheraccountbucket42342342342';
 const otherAccountKey = 'key';
 beforeEach(() => otherAccountBucketUtility
-.createOne(otherAccountBucket)
-);
+.createOne(otherAccountBucket));

 afterEach(() => otherAccountBucketUtility.empty(otherAccountBucket)
 .then(() => otherAccountBucketUtility
-.deleteOne(otherAccountBucket))
-);
+.deleteOne(otherAccountBucket)));

-it('should not allow an account without read persmission on the ' +
+test('should not allow an account without read persmission on the ' +
 'source object to copy the object', done => {
 otherAccountS3.copyObject({ Bucket: otherAccountBucket,
 Key: otherAccountKey,
@@ -821,7 +794,7 @@ describe('Object Copy', () => {
 });
 });

-it('should not allow an account without write persmission on the ' +
+test('should not allow an account without write persmission on the ' +
 'destination bucket to copy the object', done => {
 otherAccountS3.putObject({ Bucket: otherAccountBucket,
 Key: otherAccountKey, Body: '' }, () => {
@@ -836,7 +809,7 @@ describe('Object Copy', () => {
 });
 });

-it('should allow an account with read permission on the ' +
+test('should allow an account with read permission on the ' +
 'source object and write permission on the destination ' +
 'bucket to copy the object', done => {
 s3.putObjectAcl({ Bucket: sourceBucketName,
@@ -853,18 +826,16 @@ describe('Object Copy', () => {
 });
 });

-it('If-Match: returns no error when ETag match, with double quotes ' +
-'around ETag',
-done => {
+test('If-Match: returns no error when ETag match, with double quotes ' +
+'around ETag', done => {
 requestCopy({ CopySourceIfMatch: etag }, err => {
 checkNoError(err);
 done();
 });
 });

-it('If-Match: returns no error when one of ETags match, with double ' +
-'quotes around ETag',
-done => {
+test('If-Match: returns no error when one of ETags match, with double ' +
+'quotes around ETag', done => {
 requestCopy({ CopySourceIfMatch:
 `non-matching,${etag}` }, err => {
 checkNoError(err);
@@ -872,18 +843,16 @@ describe('Object Copy', () => {
 });
 });

-it('If-Match: returns no error when ETag match, without double ' +
-'quotes around ETag',
-done => {
+test('If-Match: returns no error when ETag match, without double ' +
+'quotes around ETag', done => {
 requestCopy({ CopySourceIfMatch: etagTrim }, err => {
 checkNoError(err);
 done();
 });
 });

-it('If-Match: returns no error when one of ETags match, without ' +
-'double quotes around ETag',
-done => {
+test('If-Match: returns no error when one of ETags match, without ' +
+'double quotes around ETag', done => {
 requestCopy({ CopySourceIfMatch:
 `non-matching,${etagTrim}` }, err => {
 checkNoError(err);
@@ -891,30 +860,28 @@ describe('Object Copy', () => {
 });
 });

-it('If-Match: returns no error when ETag match with *', done => {
+test('If-Match: returns no error when ETag match with *', done => {
 requestCopy({ CopySourceIfMatch: '*' }, err => {
 checkNoError(err);
 done();
 });
 });

-it('If-Match: returns PreconditionFailed when ETag does not match',
-done => {
+test('If-Match: returns PreconditionFailed when ETag does not match', done => {
 requestCopy({ CopySourceIfMatch: 'non-matching ETag' }, err => {
 checkError(err, 'PreconditionFailed');
 done();
 });
 });

-it('If-None-Match: returns no error when ETag does not match', done => {
+test('If-None-Match: returns no error when ETag does not match', done => {
 requestCopy({ CopySourceIfNoneMatch: 'non-matching' }, err => {
 checkNoError(err);
 done();
 });
 });

-it('If-None-Match: returns no error when all ETags do not match',
-done => {
+test('If-None-Match: returns no error when all ETags do not match', done => {
 requestCopy({
 CopySourceIfNoneMatch: 'non-matching,non-matching-either',
 }, err => {
@@ -923,18 +890,16 @@ describe('Object Copy', () => {
 });
 });

-it('If-None-Match: returns PreconditionFailed when ETag match, with' +
-'double quotes around ETag',
-done => {
+test('If-None-Match: returns PreconditionFailed when ETag match, with' +
+'double quotes around ETag', done => {
 requestCopy({ CopySourceIfNoneMatch: etag }, err => {
 checkError(err, 'PreconditionFailed');
 done();
 });
 });

-it('If-None-Match: returns PreconditionFailed when one of ETags ' +
-'match, with double quotes around ETag',
-done => {
+test('If-None-Match: returns PreconditionFailed when one of ETags ' +
+'match, with double quotes around ETag', done => {
 requestCopy({
 CopySourceIfNoneMatch: `non-matching,${etag}`,
 }, err => {
@@ -943,18 +908,16 @@ describe('Object Copy', () => {
 });
 });

-it('If-None-Match: returns PreconditionFailed when ETag match, ' +
-'without double quotes around ETag',
-done => {
+test('If-None-Match: returns PreconditionFailed when ETag match, ' +
+'without double quotes around ETag', done => {
 requestCopy({ CopySourceIfNoneMatch: etagTrim }, err => {
 checkError(err, 'PreconditionFailed');
 done();
 });
 });

-it('If-None-Match: returns PreconditionFailed when one of ETags ' +
-'match, without double quotes around ETag',
-done => {
+test('If-None-Match: returns PreconditionFailed when one of ETags ' +
+'match, without double quotes around ETag', done => {
 requestCopy({
 CopySourceIfNoneMatch: `non-matching,${etagTrim}`,
 }, err => {
@@ -963,9 +926,8 @@ describe('Object Copy', () => {
 });
 });

-it('If-Modified-Since: returns no error if Last modified date is ' +
-'greater',
-done => {
+test('If-Modified-Since: returns no error if Last modified date is ' +
+'greater', done => {
 requestCopy({ CopySourceIfModifiedSince: dateFromNow(-1) },
 err => {
 checkNoError(err);
@@ -975,9 +937,8 @@ describe('Object Copy', () => {

 // Skipping this test, because real AWS does not provide error as
 // expected
-it.skip('If-Modified-Since: returns PreconditionFailed if Last ' +
-'modified date is lesser',
-done => {
+test.skip('If-Modified-Since: returns PreconditionFailed if Last ' +
+'modified date is lesser', done => {
 requestCopy({ CopySourceIfModifiedSince: dateFromNow(1) },
 err => {
 checkError(err, 'PreconditionFailed');
@@ -985,9 +946,8 @@ describe('Object Copy', () => {
 });
 });
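Skipped tests carry over directly: Mocha's it.skip becomes Jest's test.skip (Jest also accepts it.skip, xit, and xtest), and the skipped body is reported but never run. For example (title illustrative):

    test.skip('pending until the backend matches AWS behavior', () => {
        // not executed; Jest reports the test as skipped
    });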
|
|
||||||
it('If-Modified-Since: returns PreconditionFailed if Last modified ' +
|
test('If-Modified-Since: returns PreconditionFailed if Last modified ' +
|
||||||
'date is equal',
|
'date is equal', done => {
|
||||||
done => {
|
|
||||||
requestCopy({ CopySourceIfModifiedSince:
|
requestCopy({ CopySourceIfModifiedSince:
|
||||||
dateConvert(lastModified) },
|
dateConvert(lastModified) },
|
||||||
err => {
|
err => {
|
||||||
|
@ -996,9 +956,8 @@ describe('Object Copy', () => {
|
||||||
});
|
});
|
||||||
});
|
});
|
||||||
|
|
||||||
it('If-Unmodified-Since: returns no error when lastModified date is ' +
|
test('If-Unmodified-Since: returns no error when lastModified date is ' +
|
||||||
'greater',
|
'greater', done => {
|
||||||
done => {
|
|
||||||
requestCopy({ CopySourceIfUnmodifiedSince: dateFromNow(1) },
|
requestCopy({ CopySourceIfUnmodifiedSince: dateFromNow(1) },
|
||||||
err => {
|
err => {
|
||||||
checkNoError(err);
|
checkNoError(err);
|
||||||
|
@ -1006,9 +965,8 @@ describe('Object Copy', () => {
|
||||||
});
|
});
|
||||||
});
|
});
|
||||||
|
|
||||||
it('If-Unmodified-Since: returns no error when lastModified ' +
|
test('If-Unmodified-Since: returns no error when lastModified ' +
|
||||||
'date is equal',
|
'date is equal', done => {
|
||||||
done => {
|
|
||||||
requestCopy({ CopySourceIfUnmodifiedSince:
|
requestCopy({ CopySourceIfUnmodifiedSince:
|
||||||
dateConvert(lastModified) },
|
dateConvert(lastModified) },
|
||||||
err => {
|
err => {
|
||||||
|
@ -1017,9 +975,8 @@ describe('Object Copy', () => {
|
||||||
});
|
});
|
||||||
});
|
});
|
||||||
|
|
||||||
it('If-Unmodified-Since: returns PreconditionFailed when ' +
|
test('If-Unmodified-Since: returns PreconditionFailed when ' +
|
||||||
'lastModified date is lesser',
|
'lastModified date is lesser', done => {
|
||||||
done => {
|
|
||||||
requestCopy({ CopySourceIfUnmodifiedSince: dateFromNow(-1) },
|
requestCopy({ CopySourceIfUnmodifiedSince: dateFromNow(-1) },
|
||||||
err => {
|
err => {
|
||||||
checkError(err, 'PreconditionFailed');
|
checkError(err, 'PreconditionFailed');
|
||||||
|
@ -1027,9 +984,8 @@ describe('Object Copy', () => {
|
||||||
});
|
});
|
||||||
});
|
});
|
||||||
|
|
||||||
it('If-Match & If-Unmodified-Since: returns no error when match Etag ' +
|
test('If-Match & If-Unmodified-Since: returns no error when match Etag ' +
|
||||||
'and lastModified is greater',
|
'and lastModified is greater', done => {
|
||||||
done => {
|
|
||||||
requestCopy({
|
requestCopy({
|
||||||
CopySourceIfMatch: etagTrim,
|
CopySourceIfMatch: etagTrim,
|
||||||
CopySourceIfUnmodifiedSince: dateFromNow(-1),
|
CopySourceIfUnmodifiedSince: dateFromNow(-1),
|
||||||
|
@ -1039,7 +995,7 @@ describe('Object Copy', () => {
|
||||||
});
|
});
|
||||||
});
|
});
|
||||||
|
|
||||||
it('If-Match match & If-Unmodified-Since match', done => {
|
test('If-Match match & If-Unmodified-Since match', done => {
|
||||||
requestCopy({
|
requestCopy({
|
||||||
CopySourceIfMatch: etagTrim,
|
CopySourceIfMatch: etagTrim,
|
||||||
CopySourceIfUnmodifiedSince: dateFromNow(1),
|
CopySourceIfUnmodifiedSince: dateFromNow(1),
|
||||||
|
@ -1049,7 +1005,7 @@ describe('Object Copy', () => {
|
||||||
});
|
});
|
||||||
});
|
});
|
||||||
|
|
||||||
it('If-Match not match & If-Unmodified-Since not match', done => {
|
test('If-Match not match & If-Unmodified-Since not match', done => {
|
||||||
requestCopy({
|
requestCopy({
|
||||||
CopySourceIfMatch: 'non-matching',
|
CopySourceIfMatch: 'non-matching',
|
||||||
CopySourceIfUnmodifiedSince: dateFromNow(-1),
|
CopySourceIfUnmodifiedSince: dateFromNow(-1),
|
||||||
|
@ -1059,7 +1015,7 @@ describe('Object Copy', () => {
|
||||||
});
|
});
|
||||||
});
|
});
|
||||||
|
|
||||||
it('If-Match not match & If-Unmodified-Since match', done => {
|
test('If-Match not match & If-Unmodified-Since match', done => {
|
||||||
requestCopy({
|
requestCopy({
|
||||||
CopySourceIfMatch: 'non-matching',
|
CopySourceIfMatch: 'non-matching',
|
||||||
CopySourceIfUnmodifiedSince: dateFromNow(1),
|
CopySourceIfUnmodifiedSince: dateFromNow(1),
|
||||||
|
@ -1071,7 +1027,7 @@ describe('Object Copy', () => {
|
||||||
|
|
||||||
// Skipping this test, because real AWS does not provide error as
|
// Skipping this test, because real AWS does not provide error as
|
||||||
// expected
|
// expected
|
||||||
it.skip('If-Match match & If-Modified-Since not match', done => {
|
test.skip('If-Match match & If-Modified-Since not match', done => {
|
||||||
requestCopy({
|
requestCopy({
|
||||||
CopySourceIfMatch: etagTrim,
|
CopySourceIfMatch: etagTrim,
|
||||||
CopySourceIfModifiedSince: dateFromNow(1),
|
CopySourceIfModifiedSince: dateFromNow(1),
|
||||||
|
@ -1081,7 +1037,7 @@ describe('Object Copy', () => {
|
||||||
});
|
});
|
||||||
});
|
});
|
||||||
|
|
||||||
it('If-Match match & If-Modified-Since match', done => {
|
test('If-Match match & If-Modified-Since match', done => {
|
||||||
requestCopy({
|
requestCopy({
|
||||||
CopySourceIfMatch: etagTrim,
|
CopySourceIfMatch: etagTrim,
|
||||||
CopySourceIfModifiedSince: dateFromNow(-1),
|
CopySourceIfModifiedSince: dateFromNow(-1),
|
||||||
|
@ -1091,7 +1047,7 @@ describe('Object Copy', () => {
|
||||||
});
|
});
|
||||||
});
|
});
|
||||||
|
|
||||||
it('If-Match not match & If-Modified-Since not match', done => {
|
test('If-Match not match & If-Modified-Since not match', done => {
|
||||||
requestCopy({
|
requestCopy({
|
||||||
CopySourceIfMatch: 'non-matching',
|
CopySourceIfMatch: 'non-matching',
|
||||||
CopySourceIfModifiedSince: dateFromNow(1),
|
CopySourceIfModifiedSince: dateFromNow(1),
|
||||||
|
@ -1101,7 +1057,7 @@ describe('Object Copy', () => {
|
||||||
});
|
});
|
||||||
});
|
});
|
||||||
|
|
||||||
it('If-Match not match & If-Modified-Since match', done => {
|
test('If-Match not match & If-Modified-Since match', done => {
|
||||||
requestCopy({
|
requestCopy({
|
||||||
CopySourceIfMatch: 'non-matching',
|
CopySourceIfMatch: 'non-matching',
|
||||||
CopySourceIfModifiedSince: dateFromNow(-1),
|
CopySourceIfModifiedSince: dateFromNow(-1),
|
||||||
|
@ -1111,9 +1067,8 @@ describe('Object Copy', () => {
|
||||||
});
|
});
|
||||||
});
|
});
|
||||||
|
|
||||||
it('If-None-Match & If-Modified-Since: returns PreconditionFailed ' +
|
test('If-None-Match & If-Modified-Since: returns PreconditionFailed ' +
|
||||||
'when Etag does not match and lastModified is greater',
|
'when Etag does not match and lastModified is greater', done => {
|
||||||
done => {
|
|
||||||
requestCopy({
|
requestCopy({
|
||||||
CopySourceIfNoneMatch: etagTrim,
|
CopySourceIfNoneMatch: etagTrim,
|
||||||
CopySourceIfModifiedSince: dateFromNow(-1),
|
CopySourceIfModifiedSince: dateFromNow(-1),
|
||||||
|
@ -1123,7 +1078,7 @@ describe('Object Copy', () => {
|
||||||
});
|
});
|
||||||
});
|
});
|
||||||
|
|
||||||
it('If-None-Match not match & If-Modified-Since not match', done => {
|
test('If-None-Match not match & If-Modified-Since not match', done => {
|
||||||
requestCopy({
|
requestCopy({
|
||||||
CopySourceIfNoneMatch: etagTrim,
|
CopySourceIfNoneMatch: etagTrim,
|
||||||
CopySourceIfModifiedSince: dateFromNow(1),
|
CopySourceIfModifiedSince: dateFromNow(1),
|
||||||
|
@ -1133,7 +1088,7 @@ describe('Object Copy', () => {
|
||||||
});
|
});
|
||||||
});
|
});
|
||||||
|
|
||||||
it('If-None-Match match & If-Modified-Since match', done => {
|
test('If-None-Match match & If-Modified-Since match', done => {
|
||||||
requestCopy({
|
requestCopy({
|
||||||
CopySourceIfNoneMatch: 'non-matching',
|
CopySourceIfNoneMatch: 'non-matching',
|
||||||
CopySourceIfModifiedSince: dateFromNow(-1),
|
CopySourceIfModifiedSince: dateFromNow(-1),
|
||||||
|
@ -1145,7 +1100,7 @@ describe('Object Copy', () => {
|
||||||
|
|
||||||
// Skipping this test, because real AWS does not provide error as
|
// Skipping this test, because real AWS does not provide error as
|
||||||
// expected
|
// expected
|
||||||
it.skip('If-None-Match match & If-Modified-Since not match', done => {
|
test.skip('If-None-Match match & If-Modified-Since not match', done => {
|
||||||
requestCopy({
|
requestCopy({
|
||||||
CopySourceIfNoneMatch: 'non-matching',
|
CopySourceIfNoneMatch: 'non-matching',
|
||||||
CopySourceIfModifiedSince: dateFromNow(1),
|
CopySourceIfModifiedSince: dateFromNow(1),
|
||||||
|
@ -1155,7 +1110,7 @@ describe('Object Copy', () => {
|
||||||
});
|
});
|
||||||
});
|
});
|
||||||
|
|
||||||
it('If-None-Match match & If-Unmodified-Since match', done => {
|
test('If-None-Match match & If-Unmodified-Since match', done => {
|
||||||
requestCopy({
|
requestCopy({
|
||||||
CopySourceIfNoneMatch: 'non-matching',
|
CopySourceIfNoneMatch: 'non-matching',
|
||||||
CopySourceIfUnmodifiedSince: dateFromNow(1),
|
CopySourceIfUnmodifiedSince: dateFromNow(1),
|
||||||
|
@ -1165,7 +1120,7 @@ describe('Object Copy', () => {
|
||||||
});
|
});
|
||||||
});
|
});
|
||||||
|
|
||||||
it('If-None-Match match & If-Unmodified-Since not match', done => {
|
test('If-None-Match match & If-Unmodified-Since not match', done => {
|
||||||
requestCopy({
|
requestCopy({
|
||||||
CopySourceIfNoneMatch: 'non-matching',
|
CopySourceIfNoneMatch: 'non-matching',
|
||||||
CopySourceIfUnmodifiedSince: dateFromNow(-1),
|
CopySourceIfUnmodifiedSince: dateFromNow(-1),
|
||||||
|
@ -1175,7 +1130,7 @@ describe('Object Copy', () => {
|
||||||
});
|
});
|
||||||
});
|
});
|
||||||
|
|
||||||
it('If-None-Match not match & If-Unmodified-Since match', done => {
|
test('If-None-Match not match & If-Unmodified-Since match', done => {
|
||||||
requestCopy({
|
requestCopy({
|
||||||
CopySourceIfNoneMatch: etagTrim,
|
CopySourceIfNoneMatch: etagTrim,
|
||||||
CopySourceIfUnmodifiedSince: dateFromNow(1),
|
CopySourceIfUnmodifiedSince: dateFromNow(1),
|
||||||
|
@ -1185,7 +1140,7 @@ describe('Object Copy', () => {
|
||||||
});
|
});
|
||||||
});
|
});
|
||||||
|
|
||||||
it('If-None-Match not match & If-Unmodified-Since not match', done => {
|
test('If-None-Match not match & If-Unmodified-Since not match', done => {
|
||||||
requestCopy({
|
requestCopy({
|
||||||
CopySourceIfNoneMatch: etagTrim,
|
CopySourceIfNoneMatch: etagTrim,
|
||||||
CopySourceIfUnmodifiedSince: dateFromNow(-1),
|
CopySourceIfUnmodifiedSince: dateFromNow(-1),
|
||||||
|
|
|
@@ -9,13 +9,12 @@ const bucketName = 'alexbucketnottaken';
 const objectName = 'someObject';

 function checkNoError(err) {
-    assert.equal(err, null,
-        `Expected success, got error ${JSON.stringify(err)}`);
+    expect(err).toEqual(null);
 }

 function checkError(err, code) {
-    assert.notEqual(err, null, 'Expected failure but got success');
-    assert.strictEqual(err.code, code);
+    expect(err).not.toEqual(null);
+    expect(err.code).toBe(code);
 }

 function dateFromNow(diff) {
@@ -36,7 +35,7 @@ describe('HEAD object, conditions', () => {
    let etagTrim;
    let lastModified;

-    before(() => {
+    beforeAll(() => {
        bucketUtil = new BucketUtility('default', sigCfg);
        s3 = bucketUtil.s3;
        return bucketUtil.empty(bucketName).then(() =>
@@ -72,68 +71,62 @@ describe('HEAD object, conditions', () => {

        afterEach(() => bucketUtil.empty(bucketName));

-        after(() => bucketUtil.deleteOne(bucketName));
+        afterAll(() => bucketUtil.deleteOne(bucketName));

-        it('If-Match: returns no error when ETag match, with double quotes ' +
-        'around ETag',
-        done => {
+        test('If-Match: returns no error when ETag match, with double quotes ' +
+        'around ETag', done => {
            requestHead({ IfMatch: etag }, err => {
                checkNoError(err);
                done();
            });
        });

-        it('If-Match: returns no error when one of ETags match, with double ' +
-        'quotes around ETag',
-        done => {
+        test('If-Match: returns no error when one of ETags match, with double ' +
+        'quotes around ETag', done => {
            requestHead({ IfMatch: `non-matching,${etag}` }, err => {
                checkNoError(err);
                done();
            });
        });

-        it('If-Match: returns no error when ETag match, without double ' +
-        'quotes around ETag',
-        done => {
+        test('If-Match: returns no error when ETag match, without double ' +
+        'quotes around ETag', done => {
            requestHead({ IfMatch: etagTrim }, err => {
                checkNoError(err);
                done();
            });
        });

-        it('If-Match: returns no error when one of ETags match, without ' +
-        'double quotes around ETag',
-        done => {
+        test('If-Match: returns no error when one of ETags match, without ' +
+        'double quotes around ETag', done => {
            requestHead({ IfMatch: `non-matching,${etagTrim}` }, err => {
                checkNoError(err);
                done();
            });
        });

-        it('If-Match: returns no error when ETag match with *', done => {
+        test('If-Match: returns no error when ETag match with *', done => {
            requestHead({ IfMatch: '*' }, err => {
                checkNoError(err);
                done();
            });
        });

-        it('If-Match: returns PreconditionFailed when ETag does not match',
-        done => {
+        test('If-Match: returns PreconditionFailed when ETag does not match', done => {
            requestHead({ IfMatch: 'non-matching ETag' }, err => {
                checkError(err, errors.PreconditionFailed.code);
                done();
            });
        });

-        it('If-None-Match: returns no error when ETag does not match', done => {
+        test('If-None-Match: returns no error when ETag does not match', done => {
            requestHead({ IfNoneMatch: 'non-matching' }, err => {
                checkNoError(err);
                done();
            });
        });

-        it('If-None-Match: returns no error when all ETags do not match',
-        done => {
+        test('If-None-Match: returns no error when all ETags do not match', done => {
            requestHead({
                IfNoneMatch: 'non-matching,non-matching-either',
            }, err => {
@@ -142,18 +135,16 @@ describe('HEAD object, conditions', () => {
            });
        });

-        it('If-None-Match: returns NotModified when ETag match, with double ' +
-        'quotes around ETag',
-        done => {
+        test('If-None-Match: returns NotModified when ETag match, with double ' +
+        'quotes around ETag', done => {
            requestHead({ IfNoneMatch: etag }, err => {
                checkError(err, 'NotModified');
                done();
            });
        });

-        it('If-None-Match: returns NotModified when one of ETags match, with ' +
-        'double quotes around ETag',
-        done => {
+        test('If-None-Match: returns NotModified when one of ETags match, with ' +
+        'double quotes around ETag', done => {
            requestHead({
                IfNoneMatch: `non-matching,${etag}`,
            }, err => {
@@ -162,18 +153,16 @@ describe('HEAD object, conditions', () => {
            });
        });

-        it('If-None-Match: returns NotModified when ETag match, without ' +
-        'double quotes around ETag',
-        done => {
+        test('If-None-Match: returns NotModified when ETag match, without ' +
+        'double quotes around ETag', done => {
            requestHead({ IfNoneMatch: etagTrim }, err => {
                checkError(err, 'NotModified');
                done();
            });
        });

-        it('If-None-Match: returns NotModified when one of ETags match, ' +
-        'without double quotes around ETag',
-        done => {
+        test('If-None-Match: returns NotModified when one of ETags match, ' +
+        'without double quotes around ETag', done => {
            requestHead({
                IfNoneMatch: `non-matching,${etagTrim}`,
            }, err => {
@@ -182,9 +171,8 @@ describe('HEAD object, conditions', () => {
            });
        });

-        it('If-Modified-Since: returns no error if Last modified date is ' +
-        'greater',
-        done => {
+        test('If-Modified-Since: returns no error if Last modified date is ' +
+        'greater', done => {
            requestHead({ IfModifiedSince: dateFromNow(-1) },
                err => {
                    checkNoError(err);
@@ -194,9 +182,8 @@ describe('HEAD object, conditions', () => {

        // Skipping this test, because real AWS does not provide error as
        // expected
-        it.skip('If-Modified-Since: returns NotModified if Last modified ' +
-        'date is lesser',
-        done => {
+        test.skip('If-Modified-Since: returns NotModified if Last modified ' +
+        'date is lesser', done => {
            requestHead({ IfModifiedSince: dateFromNow(1) },
                err => {
                    checkError(err, 'NotModified');
@@ -204,9 +191,8 @@ describe('HEAD object, conditions', () => {
            });
        });

-        it('If-Modified-Since: returns NotModified if Last modified ' +
-        'date is equal',
-        done => {
+        test('If-Modified-Since: returns NotModified if Last modified ' +
+        'date is equal', done => {
            requestHead({ IfModifiedSince: dateConvert(lastModified) },
                err => {
                    checkError(err, 'NotModified');
@@ -214,18 +200,16 @@ describe('HEAD object, conditions', () => {
            });
        });

-        it('If-Unmodified-Since: returns no error when lastModified date is ' +
-        'greater',
-        done => {
+        test('If-Unmodified-Since: returns no error when lastModified date is ' +
+        'greater', done => {
            requestHead({ IfUnmodifiedSince: dateFromNow(1) }, err => {
                checkNoError(err);
                done();
            });
        });

-        it('If-Unmodified-Since: returns no error when lastModified ' +
-        'date is equal',
-        done => {
+        test('If-Unmodified-Since: returns no error when lastModified ' +
+        'date is equal', done => {
            requestHead({ IfUnmodifiedSince: dateConvert(lastModified) },
                err => {
                    checkNoError(err);
@@ -233,18 +217,16 @@ describe('HEAD object, conditions', () => {
            });
        });

-        it('If-Unmodified-Since: returns PreconditionFailed when ' +
-        'lastModified date is lesser',
-        done => {
+        test('If-Unmodified-Since: returns PreconditionFailed when ' +
+        'lastModified date is lesser', done => {
            requestHead({ IfUnmodifiedSince: dateFromNow(-1) }, err => {
                checkError(err, errors.PreconditionFailed.code);
                done();
            });
        });

-        it('If-Match & If-Unmodified-Since: returns no error when match Etag ' +
-        'and lastModified is greater',
-        done => {
+        test('If-Match & If-Unmodified-Since: returns no error when match Etag ' +
+        'and lastModified is greater', done => {
            requestHead({
                IfMatch: etagTrim,
                IfUnmodifiedSince: dateFromNow(-1),
@@ -254,7 +236,7 @@ describe('HEAD object, conditions', () => {
            });
        });

-        it('If-Match match & If-Unmodified-Since match', done => {
+        test('If-Match match & If-Unmodified-Since match', done => {
            requestHead({
                IfMatch: etagTrim,
                IfUnmodifiedSince: dateFromNow(1),
@@ -264,7 +246,7 @@ describe('HEAD object, conditions', () => {
            });
        });

-        it('If-Match not match & If-Unmodified-Since not match', done => {
+        test('If-Match not match & If-Unmodified-Since not match', done => {
            requestHead({
                IfMatch: 'non-matching',
                IfUnmodifiedSince: dateFromNow(-1),
@@ -274,7 +256,7 @@ describe('HEAD object, conditions', () => {
            });
        });

-        it('If-Match not match & If-Unmodified-Since match', done => {
+        test('If-Match not match & If-Unmodified-Since match', done => {
            requestHead({
                IfMatch: 'non-matching',
                IfUnmodifiedSince: dateFromNow(1),
@@ -286,7 +268,7 @@ describe('HEAD object, conditions', () => {

        // Skipping this test, because real AWS does not provide error as
        // expected
-        it.skip('If-Match match & If-Modified-Since not match', done => {
+        test.skip('If-Match match & If-Modified-Since not match', done => {
            requestHead({
                IfMatch: etagTrim,
                IfModifiedSince: dateFromNow(1),
@@ -296,7 +278,7 @@ describe('HEAD object, conditions', () => {
            });
        });

-        it('If-Match match & If-Modified-Since match', done => {
+        test('If-Match match & If-Modified-Since match', done => {
            requestHead({
                IfMatch: etagTrim,
                IfModifiedSince: dateFromNow(-1),
@@ -306,7 +288,7 @@ describe('HEAD object, conditions', () => {
            });
        });

-        it('If-Match not match & If-Modified-Since not match', done => {
+        test('If-Match not match & If-Modified-Since not match', done => {
            requestHead({
                IfMatch: 'non-matching',
                IfModifiedSince: dateFromNow(1),
@@ -316,7 +298,7 @@ describe('HEAD object, conditions', () => {
            });
        });

-        it('If-Match not match & If-Modified-Since match', done => {
+        test('If-Match not match & If-Modified-Since match', done => {
            requestHead({
                IfMatch: 'non-matching',
                IfModifiedSince: dateFromNow(-1),
@@ -326,9 +308,8 @@ describe('HEAD object, conditions', () => {
            });
        });

-        it('If-None-Match & If-Modified-Since: returns NotModified when Etag ' +
-        'does not match and lastModified is greater',
-        done => {
+        test('If-None-Match & If-Modified-Since: returns NotModified when Etag ' +
+        'does not match and lastModified is greater', done => {
            requestHead({
                IfNoneMatch: etagTrim,
                IfModifiedSince: dateFromNow(-1),
@@ -338,7 +319,7 @@ describe('HEAD object, conditions', () => {
            });
        });

-        it('If-None-Match not match & If-Modified-Since not match', done => {
+        test('If-None-Match not match & If-Modified-Since not match', done => {
            requestHead({
                IfNoneMatch: etagTrim,
                IfModifiedSince: dateFromNow(1),
@@ -348,7 +329,7 @@ describe('HEAD object, conditions', () => {
            });
        });

-        it('If-None-Match match & If-Modified-Since match', done => {
+        test('If-None-Match match & If-Modified-Since match', done => {
            requestHead({
                IfNoneMatch: 'non-matching',
                IfModifiedSince: dateFromNow(-1),
@@ -360,7 +341,7 @@ describe('HEAD object, conditions', () => {

        // Skipping this test, because real AWS does not provide error as
        // expected
-        it.skip('If-None-Match match & If-Modified-Since not match', done => {
+        test.skip('If-None-Match match & If-Modified-Since not match', done => {
            requestHead({
                IfNoneMatch: 'non-matching',
                IfModifiedSince: dateFromNow(1),
@@ -370,7 +351,7 @@ describe('HEAD object, conditions', () => {
            });
        });

-        it('If-None-Match match & If-Unmodified-Since match', done => {
+        test('If-None-Match match & If-Unmodified-Since match', done => {
            requestHead({
                IfNoneMatch: 'non-matching',
                IfUnmodifiedSince: dateFromNow(1),
@@ -380,7 +361,7 @@ describe('HEAD object, conditions', () => {
            });
        });

-        it('If-None-Match match & If-Unmodified-Since not match', done => {
+        test('If-None-Match match & If-Unmodified-Since not match', done => {
            requestHead({
                IfNoneMatch: 'non-matching',
                IfUnmodifiedSince: dateFromNow(-1),
@@ -390,7 +371,7 @@ describe('HEAD object, conditions', () => {
            });
        });

-        it('If-None-Match not match & If-Unmodified-Since match', done => {
+        test('If-None-Match not match & If-Unmodified-Since match', done => {
            requestHead({
                IfNoneMatch: etagTrim,
                IfUnmodifiedSince: dateFromNow(1),
@@ -400,7 +381,7 @@ describe('HEAD object, conditions', () => {
            });
        });

-        it('If-None-Match not match & If-Unmodified-Since not match', done => {
+        test('If-None-Match not match & If-Unmodified-Since not match', done => {
            requestHead({
                IfNoneMatch: etagTrim,
                IfUnmodifiedSince: dateFromNow(-1),
@@ -410,7 +391,7 @@ describe('HEAD object, conditions', () => {
            });
        });

-        it('WebsiteRedirectLocation is set & it appears in response', done => {
+        test('WebsiteRedirectLocation is set & it appears in response', done => {
            const redirBktwBody = {
                Bucket: bucketName,
                Key: 'redir_present',
@@ -425,18 +406,16 @@ describe('HEAD object, conditions', () => {
                checkNoError(err);
                s3.headObject(redirBkt, (err, data) => {
                    checkNoError(err);
-                    assert.strictEqual(data.WebsiteRedirectLocation,
-                        'http://google.com');
+                    expect(data.WebsiteRedirectLocation).toBe('http://google.com');
                    return done();
                });
            });
        });

-        it('WebsiteRedirectLocation is not set & is absent', done => {
+        test('WebsiteRedirectLocation is not set & is absent', done => {
            requestHead({}, (err, data) => {
                checkNoError(err);
-                assert.strictEqual('WebsiteRedirectLocation' in data,
-                    false, 'WebsiteRedirectLocation header is present.');
+                expect('WebsiteRedirectLocation' in data).toBe(false);
                done();
            });
        });
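The helper rewrite above follows the mechanical assert-to-expect mapping applied throughout this migration. A minimal runnable sketch of the correspondences, assuming the Jest globals (test, expect) are injected by the runner:

    // Illustrative only: each before/after pair is behaviorally equivalent here.
    const assert = require('assert');

    test('assert-to-expect equivalences', () => {
        const err = null;
        assert.equal(err, null);                    // before
        expect(err).toEqual(null);                  // after

        const code = 'NotModified';
        assert.strictEqual(code, 'NotModified');    // before
        expect(code).toBe('NotModified');           // after

        assert.notEqual({ code }, null);            // before
        expect({ code }).not.toEqual(null);         // after
    });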
@@ -18,7 +18,7 @@ describe('HEAD object, compatibility headers [Cache-Control, ' +
    // a UNIX timestamp for Expires header
    const expires = new Date();

-    before(() => {
+    beforeAll(() => {
        bucketUtil = new BucketUtility('default', sigCfg);
        s3 = bucketUtil.s3;
        return bucketUtil.empty(bucketName).then(() =>
@@ -48,29 +48,25 @@ describe('HEAD object, compatibility headers [Cache-Control, ' +
        });
    });

-    after(() => {
+    afterAll(() => {
        process.stdout.write('deleting bucket');
        return bucketUtil.empty(bucketName).then(() =>
            bucketUtil.deleteOne(bucketName));
    });

-    it('should return additional headers if specified in objectPUT ' +
+    test('should return additional headers if specified in objectPUT ' +
    'request', done => {
        s3.headObject({ Bucket: bucketName, Key: objectName },
            (err, res) => {
                if (err) {
                    return done(err);
                }
-                assert.strictEqual(res.CacheControl,
-                    cacheControl);
-                assert.strictEqual(res.ContentDisposition,
-                    contentDisposition);
+                expect(res.CacheControl).toBe(cacheControl);
+                expect(res.ContentDisposition).toBe(contentDisposition);
                // Should remove V4 streaming value 'aws-chunked'
                // to be compatible with AWS behavior
-                assert.strictEqual(res.ContentEncoding,
-                    'gzip,');
-                assert.strictEqual(res.Expires,
-                    expires.toUTCString());
+                expect(res.ContentEncoding).toBe('gzip,');
+                expect(res.Expires).toBe(expires.toUTCString());
                return done();
            });
    });
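For context, these assertions presuppose a PUT that set the compatibility headers in the first place. A minimal sketch of such a call, assuming the suite's s3, bucketName, and objectName fixtures; the header values are illustrative, not the suite's actual ones:

    // Illustrative only: putObject carrying the headers that HEAD later echoes.
    s3.putObject({
        Bucket: bucketName,
        Key: objectName,
        Body: 'some body content',
        CacheControl: 'max-age=86400',
        ContentDisposition: 'attachment; filename="fname.ext"',
        ContentEncoding: 'aws-chunked,gzip', // 'aws-chunked' is stripped on HEAD
        Expires: expires.toUTCString(),
    }, err => {
        // handle err
    });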
@@ -22,7 +22,7 @@ describe("Head object 'ReplicationStatus' value", () => {
                if (err) {
                    return next(err);
                }
-                assert.strictEqual(res.ReplicationStatus, expectedStatus);
+                expect(res.ReplicationStatus).toBe(expectedStatus);
                return next();
            }),
        ], cb);
@@ -41,8 +41,10 @@ describe("Head object 'ReplicationStatus' value", () => {
        next => s3.deleteBucket({ Bucket: sourceBucket }, next),
    ], done));

-    it('should be `undefined` when there is no bucket replication config',
-        done => checkHeadObj(`${keyPrefix}-foobar`, undefined, done));
+    test(
+        'should be `undefined` when there is no bucket replication config',
+        done => checkHeadObj(`${keyPrefix}-foobar`, undefined, done)
+    );

    describe('With bucket replication config', () => {
        beforeEach(done => s3.putBucketReplication({
@@ -60,11 +62,15 @@ describe("Head object 'ReplicationStatus' value", () => {
            },
        }, done));

-        it("should be 'PENDING' when object key prefix applies",
-            done => checkHeadObj(`${keyPrefix}-foobar`, 'PENDING', done));
+        test(
+            "should be 'PENDING' when object key prefix applies",
+            done => checkHeadObj(`${keyPrefix}-foobar`, 'PENDING', done)
+        );

-        it('should be `undefined` when object key prefix does not apply',
-            done => checkHeadObj(`foobar-${keyPrefix}`, undefined, done));
+        test(
+            'should be `undefined` when object key prefix does not apply',
+            done => checkHeadObj(`foobar-${keyPrefix}`, undefined, done)
+        );
    });
});
});
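The checkHeadObj helper referenced above is defined earlier in this file; a minimal sketch in its spirit, assuming the suite's s3 and sourceBucket fixtures and the async library already required by these tests (the real helper may differ in details):

    // Illustrative only: PUT a key, then HEAD it and check ReplicationStatus.
    function checkHeadObjSketch(key, expectedStatus, cb) {
        async.series([
            next => s3.putObject({ Bucket: sourceBucket, Key: key }, next),
            next => s3.headObject({ Bucket: sourceBucket, Key: key },
                (err, res) => {
                    if (err) {
                        return next(err);
                    }
                    expect(res.ReplicationStatus).toBe(expectedStatus);
                    return next();
                }),
        ], cb);
    }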
@@ -21,7 +21,7 @@ describe('Put object with same key as prior object', () => {
    let s3;
    let bucketName;

-    before(done => {
+    beforeAll(done => {
        bucketUtil = new BucketUtility('default', sigCfg);
        s3 = bucketUtil.s3;
        bucketUtil.createRandom(1)
@@ -45,9 +45,10 @@ describe('Put object with same key as prior object', () => {

    afterEach(() => bucketUtil.empty(bucketName));

-    after(() => bucketUtil.deleteOne(bucketName));
+    afterAll(() => bucketUtil.deleteOne(bucketName));

-    it('should overwrite all user metadata and data on overwrite put',
+    test(
+        'should overwrite all user metadata and data on overwrite put',
        () => s3.putObjectAsync({
            Bucket: bucketName,
            Key: objectName,
@@ -59,6 +60,7 @@ describe('Put object with same key as prior object', () => {
            assert.deepStrictEqual(res.Metadata, secondPutMetadata);
            assert.deepStrictEqual(res.Body.toString(),
                'Much different');
-        }));
+        })
+    );
    });
});
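The flow this test exercises is worth spelling out: a second PUT to the same key replaces both the data and all user metadata. A minimal sketch, assuming the suite's promisified client (putObjectAsync/getObjectAsync) and fixtures; the metadata values are illustrative:

    // Illustrative only: overwrite PUT replaces data and user metadata wholesale.
    const firstPutMetadata = { evenkey: 'evenvalue' };   // example values
    const secondPutMetadata = { oddkey: 'oddvalue' };
    s3.putObjectAsync({ Bucket: bucketName, Key: objectName,
        Body: 'Initial content', Metadata: firstPutMetadata })
        .then(() => s3.putObjectAsync({ Bucket: bucketName, Key: objectName,
            Body: 'Much different', Metadata: secondPutMetadata }))
        .then(() => s3.getObjectAsync({ Bucket: bucketName, Key: objectName }))
        .then(res => {
            assert.deepStrictEqual(res.Metadata, secondPutMetadata);
            assert.deepStrictEqual(res.Body.toString(), 'Much different');
        });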
@@ -11,9 +11,9 @@ const bucket = 'bucket2putstuffin4324242';
 const object = 'object2putstuffin';

 function _checkError(err, code, statusCode) {
-    assert(err, 'Expected error but found none');
-    assert.strictEqual(err.code, code);
-    assert.strictEqual(err.statusCode, statusCode);
+    expect(err).toBeTruthy();
+    expect(err.code).toBe(code);
+    expect(err.statusCode).toBe(statusCode);
 }

 function generateMultipleTagQuery(numberOfTag) {
@@ -56,18 +56,16 @@ describe('PUT object', () => {
        });
    });

-    it('should put an object and set the acl via query param',
-        done => {
+    test('should put an object and set the acl via query param', done => {
        const params = { Bucket: bucket, Key: 'key',
            ACL: 'public-read', StorageClass: 'STANDARD' };
        const url = s3.getSignedUrl('putObject', params);
        provideRawOutput(['-verbose', '-X', 'PUT', url,
            '--upload-file', 'uploadFile'], httpCode => {
-            assert.strictEqual(httpCode, '200 OK');
+            expect(httpCode).toBe('200 OK');
            s3.getObjectAcl({ Bucket: bucket, Key: 'key' },
                (err, result) => {
-                    assert.equal(err, null, 'Expected success, ' +
-                        `got error ${JSON.stringify(err)}`);
+                    expect(err).toEqual(null);
                    assert.deepStrictEqual(result.Grants[1], { Grantee:
                        { Type: 'Group', URI:
                        'http://acs.amazonaws.com/groups/global/AllUsers',
@@ -77,67 +75,67 @@ describe('PUT object', () => {
        });
    });

-    it('should put an object with key slash',
-        done => {
+    test('should put an object with key slash', done => {
        const params = { Bucket: bucket, Key: '/' };
        s3.putObject(params, err => {
-            assert.equal(err, null, 'Expected success, ' +
-                `got error ${JSON.stringify(err)}`);
+            expect(err).toEqual(null);
            done();
        });
    });

-    it('should return error if putting object w/ > 2KB user-defined md',
+    test(
+        'should return error if putting object w/ > 2KB user-defined md',
        done => {
            const metadata = genMaxSizeMetaHeaders();
            const params = { Bucket: bucket, Key: '/', Metadata: metadata };
            s3.putObject(params, err => {
-                assert.strictEqual(err, null, `Unexpected err: ${err}`);
+                expect(err).toBe(null);
                // add one more byte to be over the limit
                metadata.header0 = `${metadata.header0}${'0'}`;
                s3.putObject(params, err => {
-                    assert(err, 'Expected err but did not find one');
-                    assert.strictEqual(err.code, 'MetadataTooLarge');
-                    assert.strictEqual(err.statusCode, 400);
+                    expect(err).toBeTruthy();
+                    expect(err.code).toBe('MetadataTooLarge');
+                    expect(err.statusCode).toBe(400);
                    done();
                });
            });
-        });
+        }
+    );

-    it('should return Not Implemented error for obj. encryption using ' +
+    test('should return Not Implemented error for obj. encryption using ' +
    'AWS-managed encryption keys', done => {
        const params = { Bucket: bucket, Key: 'key',
            ServerSideEncryption: 'AES256' };
        s3.putObject(params, err => {
-            assert.strictEqual(err.code, 'NotImplemented');
+            expect(err.code).toBe('NotImplemented');
            done();
        });
    });

-    it('should return Not Implemented error for obj. encryption using ' +
+    test('should return Not Implemented error for obj. encryption using ' +
    'customer-provided encryption keys', done => {
        const params = { Bucket: bucket, Key: 'key',
            SSECustomerAlgorithm: 'AES256' };
        s3.putObject(params, err => {
-            assert.strictEqual(err.code, 'NotImplemented');
+            expect(err.code).toBe('NotImplemented');
            done();
        });
    });

-    it('should return InvalidRedirectLocation if putting object ' +
+    test('should return InvalidRedirectLocation if putting object ' +
    'with x-amz-website-redirect-location header that does not start ' +
    'with \'http://\', \'https://\' or \'/\'', done => {
        const params = { Bucket: bucket, Key: 'key',
            WebsiteRedirectLocation: 'google.com' };
        s3.putObject(params, err => {
-            assert.strictEqual(err.code, 'InvalidRedirectLocation');
-            assert.strictEqual(err.statusCode, 400);
+            expect(err.code).toBe('InvalidRedirectLocation');
+            expect(err.statusCode).toBe(400);
            done();
        });
    });
    describe('Put object with tag set', () => {
        taggingTests.forEach(taggingTest => {
-            it(taggingTest.it, done => {
+            test(taggingTest.it, done => {
                const key = encodeURIComponent(taggingTest.tag.key);
                const value = encodeURIComponent(taggingTest.tag.value);
                const tagging = `${key}=${value}`;
@@ -148,12 +146,10 @@ describe('PUT object', () => {
                    _checkError(err, taggingTest.error, 400);
                    return done();
                }
-                assert.equal(err, null, 'Expected success, ' +
-                    `got error ${JSON.stringify(err)}`);
+                expect(err).toEqual(null);
                return s3.getObjectTagging({ Bucket: bucket,
                    Key: object }, (err, data) => {
-                    assert.equal(err, null, 'Expected success, ' +
-                        `got error ${JSON.stringify(err)}`);
+                    expect(err).toEqual(null);
                    assert.deepStrictEqual(data.TagSet[0], {
                        Key: taggingTest.tag.key,
                        Value: taggingTest.tag.value });
@@ -162,39 +158,33 @@ describe('PUT object', () => {
                });
            });
        });
-        it('should be able to put object with 10 tags',
-            done => {
+        test('should be able to put object with 10 tags', done => {
            const taggingConfig = generateMultipleTagQuery(10);
            s3.putObject({ Bucket: bucket, Key: object,
                Tagging: taggingConfig }, err => {
-                assert.equal(err, null, 'Expected success, ' +
-                    `got error ${JSON.stringify(err)}`);
+                expect(err).toEqual(null);
                done();
            });
        });

-        it('should be able to put an empty Tag set', done => {
+        test('should be able to put an empty Tag set', done => {
            s3.putObject({ Bucket: bucket, Key: object,
                Tagging: '',
            }, err => {
-                assert.equal(err, null, 'Expected success, ' +
-                    `got error ${JSON.stringify(err)}`);
+                expect(err).toEqual(null);
                done();
            });
        });

-        it('should be able to put object with empty tags',
-            done => {
+        test('should be able to put object with empty tags', done => {
            s3.putObject({ Bucket: bucket, Key: object,
                Tagging: '&&&&&&&&&&&&&&&&&key1=value1' }, err => {
-                assert.equal(err, null, 'Expected success, ' +
-                    `got error ${JSON.stringify(err)}`);
+                expect(err).toEqual(null);
                done();
            });
        });

-        it('should return BadRequest if putting more that 10 tags',
-            done => {
+        test('should return BadRequest if putting more that 10 tags', done => {
            const taggingConfig = generateMultipleTagQuery(11);
            s3.putObject({ Bucket: bucket, Key: object,
                Tagging: taggingConfig }, err => {
@@ -203,8 +193,7 @@ describe('PUT object', () => {
            });
        });

-        it('should return InvalidArgument if using the same key twice',
-            done => {
+        test('should return InvalidArgument if using the same key twice', done => {
            s3.putObject({ Bucket: bucket, Key: object,
                Tagging: 'key1=value1&key1=value2' }, err => {
                _checkError(err, 'InvalidArgument', 400);
@@ -212,7 +201,7 @@ describe('PUT object', () => {
            });
        });

-        it('should return InvalidArgument if using the same key twice ' +
+        test('should return InvalidArgument if using the same key twice ' +
        'and empty tags', done => {
            s3.putObject({ Bucket: bucket, Key: object,
                Tagging: '&&&&&&&&&&&&&&&&&key1=value1&key1=value2' },
@@ -222,7 +211,7 @@ describe('PUT object', () => {
            });
        });

-        it('should return InvalidArgument if tag with no key', done => {
+        test('should return InvalidArgument if tag with no key', done => {
            s3.putObject({ Bucket: bucket, Key: object,
                Tagging: '=value1',
            }, err => {
@@ -231,7 +220,7 @@ describe('PUT object', () => {
            });
        });

-        it('should return InvalidArgument putting object with ' +
+        test('should return InvalidArgument putting object with ' +
        'bad encoded tags', done => {
            s3.putObject({ Bucket: bucket, Key: object, Tagging:
                'key1==value1' }, err => {
@@ -239,7 +228,7 @@ describe('PUT object', () => {
                done();
            });
        });
-        it('should return InvalidArgument putting object tag with ' +
+        test('should return InvalidArgument putting object tag with ' +
        'invalid characters: %', done => {
            const value = 'value1%';
            s3.putObject({ Bucket: bucket, Key: object, Tagging:
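A note on the Tagging parameter used throughout these cases: on putObject it is a URL-encoded query string, which is why the tests run keys and values through encodeURIComponent. The body of generateMultipleTagQuery is elided in this diff; a plausible sketch of such a generator, with illustrative key/value names:

    // Illustrative only: builds the URL-encoded `Tagging` string for putObject,
    // in the spirit of the file's generateMultipleTagQuery.
    function generateMultipleTagQuerySketch(numberOfTag) {
        const tags = [];
        for (let i = 0; i < numberOfTag; i++) {
            tags.push(`key${i}=${encodeURIComponent(`value ${i}`)}`);
        }
        return tags.join('&'); // e.g. 'key0=value%200&key1=value%201'
    }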
@@ -46,7 +46,7 @@ describe('PUT Object ACL', () => {
    const s3 = bucketUtil.s3;
    const Key = 'aclTest';

-    before(done => {
+    beforeAll(done => {
        bucketUtil.createRandom(1)
            .then(created => {
                bucketName = created;
@@ -60,12 +60,12 @@ describe('PUT Object ACL', () => {
        return bucketUtil.empty(bucketName);
    });

-    after(() => {
+    afterAll(() => {
        process.stdout.write('deleting bucket');
        return bucketUtil.deleteOne(bucketName);
    });

-    it('should put object ACLs', done => {
+    test('should put object ACLs', done => {
        const s3 = bucketUtil.s3;
        const Bucket = bucketName;
        const objects = [
@@ -77,13 +77,13 @@ describe('PUT Object ACL', () => {
            .then(() => s3.putObjectAclAsync({ Bucket, Key,
                ACL: 'public-read' }))
            .then(data => {
-                assert(data);
+                expect(data).toBeTruthy();
                done();
            })
            .catch(done);
    });

-    it('should return NoSuchKey if try to put object ACLs ' +
+    test('should return NoSuchKey if try to put object ACLs ' +
    'for nonexistent object', done => {
        const s3 = bucketUtil.s3;
        const Bucket = bucketName;
@@ -92,16 +92,16 @@ describe('PUT Object ACL', () => {
            Bucket,
            Key,
            ACL: 'public-read' }, err => {
-            assert(err);
-            assert.strictEqual(err.statusCode, 404);
-            assert.strictEqual(err.code, 'NoSuchKey');
+            expect(err).toBeTruthy();
+            expect(err.statusCode).toBe(404);
+            expect(err.code).toBe('NoSuchKey');
            done();
        });
    });

    describe('on an object', () => {
-        before(done => s3.putObject({ Bucket: bucketName, Key }, done));
-        after(() => {
+        beforeAll(done => s3.putObject({ Bucket: bucketName, Key }, done));
+        afterAll(() => {
            process.stdout.write('deleting bucket');
            return bucketUtil.empty(bucketName);
        });
@@ -118,9 +118,9 @@ describe('PUT Object ACL', () => {
            AccessControlPolicy: acp,
        };
        s3.putObjectAcl(putAclParams, err => {
-            assert(err);
-            assert.strictEqual(err.statusCode, 403);
-            assert.strictEqual(err.code, 'AccessDenied');
+            expect(err).toBeTruthy();
+            expect(err.statusCode).toBe(403);
+            expect(err.code).toBe('AccessDenied');
            done();
        });
    });
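The acp object passed as AccessControlPolicy above is built earlier in the file and elided from this diff. For orientation, a sketch of the general AccessControlPolicy shape the SDK expects; the IDs and grants here are illustrative, not the suite's actual values:

    // Illustrative only: the general shape of an AccessControlPolicy.
    const acpSketch = {
        Owner: {
            ID: '1234567890abcdef',       // canonical user ID (example)
            DisplayName: 'ownerName',
        },
        Grants: [{
            Grantee: { Type: 'CanonicalUser', ID: '1234567890abcdef' },
            Permission: 'FULL_CONTROL',
        }],
    };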
@@ -36,9 +36,9 @@ function generateTaggingConfig(key, value) {
 }

 function _checkError(err, code, statusCode) {
-    assert(err, 'Expected error but found none');
-    assert.strictEqual(err.code, code);
-    assert.strictEqual(err.statusCode, statusCode);
+    expect(err).toBeTruthy();
+    expect(err.code).toBe(code);
+    expect(err.statusCode).toBe(statusCode);
 }

 describe('PUT object taggings', () => {
@@ -69,7 +69,7 @@ describe('PUT object taggings', () => {
    });

    taggingTests.forEach(taggingTest => {
-        it(taggingTest.it, done => {
+        test(taggingTest.it, done => {
            const taggingConfig = generateTaggingConfig(taggingTest.tag.key,
                taggingTest.tag.value);
            s3.putObjectTagging({ Bucket: bucketName, Key: objectName,
@@ -78,14 +78,14 @@ describe('PUT object taggings', () => {
                _checkError(err, taggingTest.error, 400);
            } else {
                assert.ifError(err, `Found unexpected err ${err}`);
-                assert.strictEqual(Object.keys(data).length, 0);
+                expect(Object.keys(data).length).toBe(0);
            }
            done();
        });
    });
    });

-    it('should return BadRequest if putting more that 10 tags', done => {
+    test('should return BadRequest if putting more that 10 tags', done => {
        const taggingConfig = generateMultipleTagConfig(11);
        s3.putObjectTagging({ Bucket: bucketName, Key: objectName,
            Tagging: taggingConfig }, err => {
@@ -94,7 +94,7 @@ describe('PUT object taggings', () => {
        });
    });

-    it('should return InvalidTag if using the same key twice', done => {
+    test('should return InvalidTag if using the same key twice', done => {
        s3.putObjectTagging({ Bucket: bucketName, Key: objectName,
            Tagging: { TagSet: [
                {
@@ -112,7 +112,7 @@ describe('PUT object taggings', () => {
        });
    });

-    it('should return InvalidTag if key is an empty string', done => {
+    test('should return InvalidTag if key is an empty string', done => {
        s3.putObjectTagging({ Bucket: bucketName, Key: objectName,
            Tagging: { TagSet: [
                {
@@ -126,17 +126,17 @@ describe('PUT object taggings', () => {
        });
    });

-    it('should be able to put an empty Tag set', done => {
+    test('should be able to put an empty Tag set', done => {
        s3.putObjectTagging({ Bucket: bucketName, Key: objectName,
            Tagging: { TagSet: [] },
        }, (err, data) => {
            assert.ifError(err, `Found unexpected err ${err}`);
-            assert.strictEqual(Object.keys(data).length, 0);
+            expect(Object.keys(data).length).toBe(0);
            done();
        });
    });

-    it('should return NoSuchKey put tag to a non-existing object', done => {
+    test('should return NoSuchKey put tag to a non-existing object', done => {
        s3.putObjectTagging({
            Bucket: bucketName,
            Key: 'nonexisting',
@@ -147,7 +147,8 @@ describe('PUT object taggings', () => {
        });
    });

-    it('should return 403 AccessDenied putting tag with another account',
+    test(
+        'should return 403 AccessDenied putting tag with another account',
        done => {
            otherAccountS3.putObjectTagging({ Bucket: bucketName, Key:
                objectName, Tagging: taggingConfig,
@@ -155,11 +156,11 @@ describe('PUT object taggings', () => {
                _checkError(err, 'AccessDenied', 403);
                done();
            });
-        });
+        }
+    );

-    it('should return 403 AccessDenied putting tag with a different ' +
-    'account to an object with ACL "public-read-write"',
-    done => {
+    test('should return 403 AccessDenied putting tag with a different ' +
+    'account to an object with ACL "public-read-write"', done => {
        s3.putObjectAcl({ Bucket: bucketName, Key: objectName,
            ACL: 'public-read-write' }, err => {
            if (err) {
@@ -174,9 +175,8 @@ describe('PUT object taggings', () => {
            });
        });

-    it('should return 403 AccessDenied putting tag to an object ' +
-    'in a bucket created with a different account',
-    done => {
+    test('should return 403 AccessDenied putting tag to an object ' +
+    'in a bucket created with a different account', done => {
        async.waterfall([
            next => s3.putBucketAcl({ Bucket: bucketName, ACL:
                'public-read-write' }, err => next(err)),
@@ -191,7 +191,7 @@ describe('PUT object taggings', () => {
        });
    });

-    it('should put tag to an object in a bucket created with same ' +
+    test('should put tag to an object in a bucket created with same ' +
    'account', done => {
        async.waterfall([
            next => s3.putBucketAcl({ Bucket: bucketName, ACL:
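Unlike putObject, where Tagging is a URL-encoded string, putObjectTagging takes a structured TagSet, as the cases above show. A minimal well-formed call for contrast, assuming the suite's s3, bucketName, and objectName fixtures; the tag values are illustrative:

    // Illustrative only: putObjectTagging with a structured TagSet.
    s3.putObjectTagging({
        Bucket: bucketName,
        Key: objectName,
        Tagging: { TagSet: [
            { Key: 'key1', Value: 'value1' },
            { Key: 'key2', Value: 'value2' },
        ] },
    }, (err, data) => {
        assert.ifError(err);
    });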
@@ -44,21 +44,21 @@ describe('PUT object', () => {
        });
    });

-    it('should return Not Implemented error for obj. encryption using ' +
+    test('should return Not Implemented error for obj. encryption using ' +
    'customer-provided encryption keys', done => {
        const params = { Bucket: bucket, Key: 'key', PartNumber: 0,
            UploadId: uploadId, SSECustomerAlgorithm: 'AES256' };
        s3.uploadPart(params, err => {
-            assert.strictEqual(err.code, 'NotImplemented');
+            expect(err.code).toBe('NotImplemented');
            done();
        });
    });

-    it('should return InvalidArgument if negative PartNumber', done => {
+    test('should return InvalidArgument if negative PartNumber', done => {
        const params = { Bucket: bucket, Key: 'key', PartNumber: -1,
            UploadId: uploadId };
        s3.uploadPart(params, err => {
-            assert.strictEqual(err.code, 'InvalidArgument');
+            expect(err.code).toBe('InvalidArgument');
            done();
        });
    });
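For contrast with the invalid PartNumber cases above: S3 part numbers must be integers from 1 to 10000. A sketch of a well-formed uploadPart call, assuming uploadId came from createMultipartUpload as in the suite's setup; the body size is illustrative:

    // Illustrative only: a valid part upload within an open multipart upload.
    s3.uploadPart({
        Bucket: bucket,
        Key: 'key',
        PartNumber: 1,                        // valid range is 1..10000
        UploadId: uploadId,
        Body: Buffer.alloc(5 * 1024 * 1024),  // 5 MiB, the minimum non-final part
    }, (err, data) => {
        if (!err) {
            // data.ETag identifies this part for completeMultipartUpload
        }
    });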
@@ -87,9 +87,9 @@ function createHashedFile(bytes) {
 }

 describeSkipIfCeph('aws-node-sdk range tests', () => {
-    before(() => execFileAsync('gcc', ['-o', 'getRangeExec',
+    beforeAll(() => execFileAsync('gcc', ['-o', 'getRangeExec',
        'lib/utility/getRange.c']));
-    after(() => execAsync('rm getRangeExec'));
+    afterAll(() => execAsync('rm getRangeExec'));

    describe('aws-node-sdk range test for object put by MPU', () =>
        withV4(sigCfg => {
@@ -125,8 +125,7 @@ describeSkipIfCeph('aws-node-sdk range tests', () => {
                    },
                ],
            },
-        }))
-        );
+        })));

        afterEach(() => bucketUtil.empty(bucket)
            .then(() => s3.abortMultipartUploadAsync({
@@ -141,19 +140,18 @@ describeSkipIfCeph('aws-node-sdk range tests', () => {
                resolve();
            }))
            .then(() => bucketUtil.deleteOne(bucket))
-            .then(() => execAsync(`rm hashedFile.${fileSize}*`))
-        );
+            .then(() => execAsync(`rm hashedFile.${fileSize}*`)));

-        it('should get a range from the first part of an object', () =>
+        test('should get a range from the first part of an object', () =>
            checkRanges('0-9', fileSize));

-        it('should get a range from the second part of an object', () =>
+        test('should get a range from the second part of an object', () =>
            checkRanges('5242880-5242889', fileSize));

-        it('should get a range that spans both parts of an object', () =>
+        test('should get a range that spans both parts of an object', () =>
            checkRanges('5242875-5242884', fileSize));

-        it('should get a range from the second part of an object and ' +
+        test('should get a range from the second part of an object and ' +
        'include the end if the range requested goes beyond the ' +
        'actual object end', () =>
            checkRanges('10485750-10485790', fileSize));
@@ -210,7 +208,7 @@ describeSkipIfCeph('aws-node-sdk range tests', () => {
    ];

    putRangeTests.forEach(range => {
-        it(`should get a range of ${range} bytes using a ${fileSize} ` +
+        test(`should get a range of ${range} bytes using a ${fileSize} ` +
        'byte sized object', () =>
            checkRanges(range, fileSize));
    });
@@ -236,11 +234,11 @@ describeSkipIfCeph('aws-node-sdk range tests', () => {
            .then(() => bucketUtil.deleteOne(bucket))
            .then(() => execAsync(`rm hashedFile.${fileSize}*`)));

-    it('should get the final 90 bytes of a 2890 byte object for a ' +
+    test('should get the final 90 bytes of a 2890 byte object for a ' +
    'byte range of 2800-', () =>
        checkRanges('2800-', fileSize));

-    it('should get the final 90 bytes of a 2890 byte object for a ' +
+    test('should get the final 90 bytes of a 2890 byte object for a ' +
    'byte range of 2800-Number.MAX_SAFE_INTEGER', () =>
        checkRanges(`2800-${Number.MAX_SAFE_INTEGER}`, fileSize));
 });
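The Range semantics these cases exercise follow HTTP byte-range rules: an open-ended or over-long range is clamped to the object's actual end. A sketch of the underlying request, assuming a 2890-byte object as in the final cases above; the suite's checkRanges additionally diffs the returned bytes against a locally hashed file:

    // Illustrative only: a plain ranged GET showing the clamping behavior.
    s3.getObject({
        Bucket: bucket,
        Key: 'key',
        Range: 'bytes=2800-',  // open-ended; clamped to the object end
    }, (err, data) => {
        if (!err) {
            // data.ContentRange would be 'bytes 2800-2889/2890' here
            expect(data.ContentLength).toBe(90);
        }
    });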
@@ -44,7 +44,7 @@ function putBucketWebsiteAndPutObjectRedirect(redirect, condition, key, done) {
 }

 describe('User visits bucket website endpoint', () => {
-    it('should return 404 when no such bucket', done => {
+    test('should return 404 when no such bucket', done => {
        WebsiteConfigTester.checkHTML({
            method: 'GET',
            url: endpoint,
@@ -57,7 +57,7 @@ describe('User visits bucket website endpoint', () => {

        afterEach(done => s3.deleteBucket({ Bucket: bucket }, done));

-        it('should return 404 when no website configuration', done => {
+        test('should return 404 when no website configuration', done => {
            WebsiteConfigTester.checkHTML({
                method: 'GET',
                url: endpoint,
@@ -70,15 +70,14 @@ describe('User visits bucket website endpoint', () => {
            const webConfig = new WebsiteConfigTester('index.html');
            s3.putBucketWebsite({ Bucket: bucket,
                WebsiteConfiguration: webConfig }, err => {
-                assert.strictEqual(err,
-                    null, `Found unexpected err ${err}`);
+                expect(err).toBe(null);
                s3.putObject({ Bucket: bucket, Key: 'index.html',
                    ACL: 'public-read',
                    Body: fs.readFileSync(path.join(__dirname,
                        '/websiteFiles/index.html')),
                    ContentType: 'text/html' },
                    err => {
-                        assert.strictEqual(err, null);
+                        expect(err).toBe(null);
                        done();
                    });
            });
@@ -89,30 +88,29 @@ describe('User visits bucket website endpoint', () => {
                err => done(err));
        });

-        it('should return 405 when user requests method other than get ' +
+        test('should return 405 when user requests method other than get ' +
        'or head', done => {
            makeRequest({
                hostname,
                port,
                method: 'POST',
            }, (err, res) => {
-                assert.strictEqual(err, null,
-                    `Err with request ${err}`);
-                assert.strictEqual(res.statusCode, 405);
-                assert(res.body.indexOf('<head><title>405 ' +
-                'Method Not Allowed</title></head>') > -1);
+                expect(err).toBe(null);
+                expect(res.statusCode).toBe(405);
+                expect(res.body.indexOf('<head><title>405 ' +
+                'Method Not Allowed</title></head>') > -1).toBeTruthy();
                return done();
            });
        });

-        it('should serve indexDocument if no key requested', done => {
+        test('should serve indexDocument if no key requested', done => {
            WebsiteConfigTester.checkHTML({
                method: 'GET',
                url: endpoint,
                responseType: 'index-user',
            }, done);
        });
-        it('should serve indexDocument if key requested', done => {
+        test('should serve indexDocument if key requested', done => {
            WebsiteConfigTester.checkHTML({
                method: 'GET',
                url: `${endpoint}/index.html`,
@@ -125,8 +123,7 @@ describe('User visits bucket website endpoint', () => {
            const webConfig = new WebsiteConfigTester('index.html');
            s3.putBucketWebsite({ Bucket: bucket,
                WebsiteConfiguration: webConfig }, err => {
-                assert.strictEqual(err,
-                    null, `Found unexpected err ${err}`);
+                expect(err).toBe(null);
                s3.putObject({ Bucket: bucket,
                    Key: 'pathprefix/index.html',
                    ACL: 'public-read',
@@ -142,8 +139,7 @@ describe('User visits bucket website endpoint', () => {
                done);
        });

-        it('should serve indexDocument if path request without key',
-        done => {
+        test('should serve indexDocument if path request without key', done => {
            WebsiteConfigTester.checkHTML({
                method: 'GET',
                url: `${endpoint}/pathprefix/`,
@@ -151,8 +147,7 @@ describe('User visits bucket website endpoint', () => {
            }, done);
        });

-        it('should serve indexDocument if path request with key',
-        done => {
+        test('should serve indexDocument if path request with key', done => {
            WebsiteConfigTester.checkHTML({
                method: 'GET',
                url: `${endpoint}/pathprefix/index.html`,
@@ -166,8 +161,7 @@ describe('User visits bucket website endpoint', () => {
            const webConfig = new WebsiteConfigTester('index.html');
            s3.putBucketWebsite({ Bucket: bucket,
                WebsiteConfiguration: webConfig }, err => {
-                assert.strictEqual(err,
-                    null, `Found unexpected err ${err}`);
+                expect(err).toBe(null);
                s3.putObject({ Bucket: bucket,
                    Key: 'index.html',
                    ACL: 'private',
@@ -181,7 +175,7 @@ describe('User visits bucket website endpoint', () => {
            s3.deleteObject({ Bucket: bucket, Key: 'index.html' }, done);
        });

-        it('should return 403 if key is private', done => {
+        test('should return 403 if key is private', done => {
            WebsiteConfigTester.checkHTML({
                method: 'GET',
                url: endpoint,
@@ -197,7 +191,7 @@ describe('User visits bucket website endpoint', () => {
                WebsiteConfiguration: webConfig }, done);
        });

-        it('should return 403 if nonexisting index document key', done => {
+        test('should return 403 if nonexisting index document key', done => {
            WebsiteConfigTester.checkHTML({
                method: 'GET',
                url: endpoint,
@@ -217,7 +211,7 @@ describe('User visits bucket website endpoint', () => {
                WebsiteConfiguration: webConfig }, done);
        });

-        it(`should redirect to ${redirectEndpoint}`, done => {
+        test(`should redirect to ${redirectEndpoint}`, done => {
            WebsiteConfigTester.checkHTML({
                method: 'GET',
                url: endpoint,
@@ -226,7 +220,7 @@ describe('User visits bucket website endpoint', () => {
            }, done);
        });

-        it(`should redirect to ${redirectEndpoint}/about`, done => {
+        test(`should redirect to ${redirectEndpoint}/about`, done => {
|
||||||
WebsiteConfigTester.checkHTML({
|
WebsiteConfigTester.checkHTML({
|
||||||
method: 'GET',
|
method: 'GET',
|
||||||
url: `${endpoint}/about`,
|
url: `${endpoint}/about`,
|
||||||
|
@ -252,7 +246,7 @@ describe('User visits bucket website endpoint', () => {
|
||||||
WebsiteConfiguration: webConfig }, done);
|
WebsiteConfiguration: webConfig }, done);
|
||||||
});
|
});
|
||||||
|
|
||||||
it('should redirect to https://google.com/', done => {
|
test('should redirect to https://google.com/', done => {
|
||||||
WebsiteConfigTester.checkHTML({
|
WebsiteConfigTester.checkHTML({
|
||||||
method: 'GET',
|
method: 'GET',
|
||||||
url: endpoint,
|
url: endpoint,
|
||||||
|
@ -261,7 +255,7 @@ describe('User visits bucket website endpoint', () => {
|
||||||
}, done);
|
}, done);
|
||||||
});
|
});
|
||||||
|
|
||||||
it('should redirect to https://google.com/about', done => {
|
test('should redirect to https://google.com/about', done => {
|
||||||
WebsiteConfigTester.checkHTML({
|
WebsiteConfigTester.checkHTML({
|
||||||
method: 'GET',
|
method: 'GET',
|
||||||
url: `${endpoint}/about`,
|
url: `${endpoint}/about`,
|
||||||
|
@ -277,8 +271,7 @@ describe('User visits bucket website endpoint', () => {
|
||||||
'error.html');
|
'error.html');
|
||||||
s3.putBucketWebsite({ Bucket: bucket,
|
s3.putBucketWebsite({ Bucket: bucket,
|
||||||
WebsiteConfiguration: webConfig }, err => {
|
WebsiteConfiguration: webConfig }, err => {
|
||||||
assert.strictEqual(err,
|
expect(err).toBe(null);
|
||||||
null, `Found unexpected err ${err}`);
|
|
||||||
s3.putObject({ Bucket: bucket,
|
s3.putObject({ Bucket: bucket,
|
||||||
Key: 'error.html',
|
Key: 'error.html',
|
||||||
ACL: 'public-read',
|
ACL: 'public-read',
|
||||||
|
@ -292,8 +285,7 @@ describe('User visits bucket website endpoint', () => {
|
||||||
s3.deleteObject({ Bucket: bucket, Key: 'error.html' }, done);
|
s3.deleteObject({ Bucket: bucket, Key: 'error.html' }, done);
|
||||||
});
|
});
|
||||||
|
|
||||||
it('should serve custom error document if an error occurred',
|
test('should serve custom error document if an error occurred', done => {
|
||||||
done => {
|
|
||||||
WebsiteConfigTester.checkHTML({
|
WebsiteConfigTester.checkHTML({
|
||||||
method: 'GET',
|
method: 'GET',
|
||||||
url: endpoint,
|
url: endpoint,
|
||||||
|
@ -310,7 +302,7 @@ describe('User visits bucket website endpoint', () => {
|
||||||
WebsiteConfiguration: webConfig }, done);
|
WebsiteConfiguration: webConfig }, done);
|
||||||
});
|
});
|
||||||
|
|
||||||
it('should serve s3 error file if unfound custom error document ' +
|
test('should serve s3 error file if unfound custom error document ' +
|
||||||
'and an error occurred', done => {
|
'and an error occurred', done => {
|
||||||
WebsiteConfigTester.checkHTML({
|
WebsiteConfigTester.checkHTML({
|
||||||
method: 'GET',
|
method: 'GET',
|
||||||
|
@ -334,7 +326,7 @@ describe('User visits bucket website endpoint', () => {
|
||||||
WebsiteConfiguration: webConfig }, done);
|
WebsiteConfiguration: webConfig }, done);
|
||||||
});
|
});
|
||||||
|
|
||||||
it(`should redirect to ${redirectEndpoint} if error 403` +
|
test(`should redirect to ${redirectEndpoint} if error 403` +
|
||||||
' occured', done => {
|
' occured', done => {
|
||||||
WebsiteConfigTester.checkHTML({
|
WebsiteConfigTester.checkHTML({
|
||||||
method: 'GET',
|
method: 'GET',
|
||||||
|
@ -359,7 +351,7 @@ describe('User visits bucket website endpoint', () => {
|
||||||
WebsiteConfiguration: webConfig }, done);
|
WebsiteConfiguration: webConfig }, done);
|
||||||
});
|
});
|
||||||
|
|
||||||
it(`should redirect to ${redirectEndpoint}/about/ if ` +
|
test(`should redirect to ${redirectEndpoint}/about/ if ` +
|
||||||
'key prefix is equal to "about"', done => {
|
'key prefix is equal to "about"', done => {
|
||||||
WebsiteConfigTester.checkHTML({
|
WebsiteConfigTester.checkHTML({
|
||||||
method: 'GET',
|
method: 'GET',
|
||||||
|
@ -386,7 +378,7 @@ describe('User visits bucket website endpoint', () => {
|
||||||
WebsiteConfiguration: webConfig }, done);
|
WebsiteConfiguration: webConfig }, done);
|
||||||
});
|
});
|
||||||
|
|
||||||
it(`should redirect to ${redirectEndpoint} if ` +
|
test(`should redirect to ${redirectEndpoint} if ` +
|
||||||
'key prefix is equal to "about" AND error code 403', done => {
|
'key prefix is equal to "about" AND error code 403', done => {
|
||||||
WebsiteConfigTester.checkHTML({
|
WebsiteConfigTester.checkHTML({
|
||||||
method: 'GET',
|
method: 'GET',
|
||||||
|
@ -415,7 +407,7 @@ describe('User visits bucket website endpoint', () => {
|
||||||
WebsiteConfiguration: webConfig }, done);
|
WebsiteConfiguration: webConfig }, done);
|
||||||
});
|
});
|
||||||
|
|
||||||
it('should redirect to the first one', done => {
|
test('should redirect to the first one', done => {
|
||||||
WebsiteConfigTester.checkHTML({
|
WebsiteConfigTester.checkHTML({
|
||||||
method: 'GET',
|
method: 'GET',
|
||||||
url: `${endpoint}/about/`,
|
url: `${endpoint}/about/`,
|
||||||
|
@ -441,7 +433,7 @@ describe('User visits bucket website endpoint', () => {
|
||||||
WebsiteConfiguration: webConfig }, done);
|
WebsiteConfiguration: webConfig }, done);
|
||||||
});
|
});
|
||||||
|
|
||||||
it('should redirect to https://www.google.com/about if ' +
|
test('should redirect to https://www.google.com/about if ' +
|
||||||
'https protocols', done => {
|
'https protocols', done => {
|
||||||
WebsiteConfigTester.checkHTML({
|
WebsiteConfigTester.checkHTML({
|
||||||
method: 'GET',
|
method: 'GET',
|
||||||
|
@ -469,8 +461,7 @@ describe('User visits bucket website endpoint', () => {
|
||||||
err => done(err));
|
err => done(err));
|
||||||
});
|
});
|
||||||
|
|
||||||
it('should serve redirect file if error 403 error occured',
|
test('should serve redirect file if error 403 error occured', done => {
|
||||||
done => {
|
|
||||||
WebsiteConfigTester.checkHTML({
|
WebsiteConfigTester.checkHTML({
|
||||||
method: 'GET',
|
method: 'GET',
|
||||||
url: endpoint,
|
url: endpoint,
|
||||||
|
@ -495,7 +486,7 @@ describe('User visits bucket website endpoint', () => {
|
||||||
WebsiteConfiguration: webConfig }, done);
|
WebsiteConfiguration: webConfig }, done);
|
||||||
});
|
});
|
||||||
|
|
||||||
it(`should redirect to ${redirectEndpoint}/about/ if ` +
|
test(`should redirect to ${redirectEndpoint}/about/ if ` +
|
||||||
'ReplaceKeyPrefixWith equals "about/"', done => {
|
'ReplaceKeyPrefixWith equals "about/"', done => {
|
||||||
WebsiteConfigTester.checkHTML({
|
WebsiteConfigTester.checkHTML({
|
||||||
method: 'GET',
|
method: 'GET',
|
||||||
|
@ -524,8 +515,7 @@ describe('User visits bucket website endpoint', () => {
|
||||||
err => done(err));
|
err => done(err));
|
||||||
});
|
});
|
||||||
|
|
||||||
it('should serve redirect file if key prefix is equal to "about"',
|
test('should serve redirect file if key prefix is equal to "about"', done => {
|
||||||
done => {
|
|
||||||
WebsiteConfigTester.checkHTML({
|
WebsiteConfigTester.checkHTML({
|
||||||
method: 'GET',
|
method: 'GET',
|
||||||
url: `${endpoint}/about/`,
|
url: `${endpoint}/about/`,
|
||||||
|
@ -554,9 +544,8 @@ describe('User visits bucket website endpoint', () => {
|
||||||
err => done(err));
|
err => done(err));
|
||||||
});
|
});
|
||||||
|
|
||||||
it('should serve redirect file if key prefix is equal to ' +
|
test('should serve redirect file if key prefix is equal to ' +
|
||||||
'"about" and error 403',
|
'"about" and error 403', done => {
|
||||||
done => {
|
|
||||||
WebsiteConfigTester.checkHTML({
|
WebsiteConfigTester.checkHTML({
|
||||||
method: 'GET',
|
method: 'GET',
|
||||||
url: `${endpoint}/about/`,
|
url: `${endpoint}/about/`,
|
||||||
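Every hunk in the file above applies the same mechanical migration: Mocha's `it` becomes Jest's `test`, and `assert`/`assert.strictEqual` checks become `expect` matchers. A minimal runnable sketch of the pattern, with a stubbed callback API standing in for the real s3 client (the stub and its names are illustrative, not part of this changeset):

// after.test.js -- run with Jest.
// Stand-in for s3.putBucketWebsite and friends; always "succeeds".
const s3Stub = {
    putBucketWebsite(params, cb) {
        process.nextTick(() => cb(null));
    },
};

// Before (Mocha + Node assert):
//     it('should configure the bucket website', done => {
//         s3.putBucketWebsite(params, err => {
//             assert.strictEqual(err, null, `Found unexpected err ${err}`);
//             done();
//         });
//     });
// After (Jest), matching the rewrite applied in this diff:
test('should configure the bucket website', done => {
    s3Stub.putBucketWebsite({}, err => {
        expect(err).toBe(null);
        done();
    });
});

One nuance of the rewrite: `expect(err).toBe(null)` drops the custom failure message that `assert.strictEqual(err, null, ...)` carried, so a failure now reports only the mismatched values.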
@@ -142,7 +142,7 @@ describe('User visits bucket website endpoint with ACL', () => {
                 });
             });

-            it(`${test.it} with no auth credentials sent`, done => {
+            test(`${test.it} with no auth credentials sent`, done => {
                 WebsiteConfigTester.checkHTML({
                     method: 'GET',
                     url: endpoint,
@@ -150,7 +150,7 @@ describe('User visits bucket website endpoint with ACL', () => {
                 }, done);
             });

-            it(`${test.it} even with invalid auth credentials`, done => {
+            test(`${test.it} even with invalid auth credentials`, done => {
                 WebsiteConfigTester.checkHTML({
                     auth: 'invalid credentials',
                     method: 'GET',
@@ -159,7 +159,7 @@ describe('User visits bucket website endpoint with ACL', () => {
                 }, done);
             });

-            it(`${test.it} even with valid auth credentials`, done => {
+            test(`${test.it} even with valid auth credentials`, done => {
                 WebsiteConfigTester.checkHTML({
                     auth: 'valid credentials',
                     method: 'GET',
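One thing worth flagging in the ACL file above: the test titles interpolate `${test.it}`, which suggests these calls sit inside a callback whose parameter is named `test`. After the `it` to `test` rename, that parameter would shadow Jest's global `test` inside the callback, so the calls would hit the loop variable instead of Jest. A hypothetical sketch of the pitfall and the usual fix (renaming the parameter); none of these names are from the changeset:

// Pitfall: inside this callback, `test` is the loop item, not Jest's global,
// so calling test(`${test.it} ...`, ...) throws "test is not a function".
// const cases = [{ it: 'should return 200' }];
// cases.forEach(test => {
//     test(`${test.it} with no auth credentials sent`, done => { /* ... */ });
// });

// Fix: rename the parameter so the Jest global stays visible.
const cases = [{ it: 'should return 200' }];
cases.forEach(testCase => {
    test(`${testCase.it} with no auth credentials sent`, () => {
        expect(testCase.it).toBeDefined();
    });
});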
@@ -77,7 +77,7 @@ const indexExpectedHeaders = {


 describe('Head request on bucket website endpoint', () => {
-    it('should return 404 when no such bucket', done => {
+    test('should return 404 when no such bucket', done => {
         const expectedHeaders = {
             'x-amz-error-code': 'NoSuchBucket',
             // Need arsenal fixed to remove period at the end
@@ -93,7 +93,7 @@ describe('Head request on bucket website endpoint', () => {

         afterEach(done => s3.deleteBucket({ Bucket: bucket }, done));

-        it('should return 404 when no website configuration', done => {
+        test('should return 404 when no website configuration', done => {
             const expectedHeaders = {
                 'x-amz-error-code': 'NoSuchWebsiteConfiguration',
                 'x-amz-error-message': 'The specified bucket does not ' +
@@ -108,8 +108,7 @@ describe('Head request on bucket website endpoint', () => {
         const webConfig = new WebsiteConfigTester('index.html');
         s3.putBucketWebsite({ Bucket: bucket,
             WebsiteConfiguration: webConfig }, err => {
-            assert.strictEqual(err,
-                null, `Found unexpected err ${err}`);
+            expect(err).toBe(null);
             s3.putObject({ Bucket: bucket, Key: 'index.html',
                 ACL: 'public-read',
                 Body: fs.readFileSync(path.join(__dirname,
@@ -120,7 +119,7 @@ describe('Head request on bucket website endpoint', () => {
                 },
                 },
                 err => {
-                    assert.strictEqual(err, null);
+                    expect(err).toBe(null);
                     done();
                 });
         });
@@ -131,13 +130,13 @@ describe('Head request on bucket website endpoint', () => {
                 err => done(err));
         });

-        it('should return indexDocument headers if no key ' +
+        test('should return indexDocument headers if no key ' +
         'requested', done => {
             WebsiteConfigTester.makeHeadRequest(undefined, endpoint,
                 200, indexExpectedHeaders, done);
         });

-        it('should return indexDocument headers if key requested', done => {
+        test('should return indexDocument headers if key requested', done => {
             WebsiteConfigTester.makeHeadRequest(undefined,
                 `${endpoint}/index.html`, 200, indexExpectedHeaders, done);
         });
@@ -148,8 +147,7 @@ describe('Head request on bucket website endpoint', () => {
         const webConfig = new WebsiteConfigTester('index.html');
         s3.putBucketWebsite({ Bucket: bucket,
             WebsiteConfiguration: webConfig }, err => {
-            assert.strictEqual(err,
-                null, `Found unexpected err ${err}`);
+            expect(err).toBe(null);
             s3.putObject({ Bucket: bucket,
                 Key: 'pathprefix/index.html',
                 ACL: 'public-read',
@@ -169,14 +167,12 @@ describe('Head request on bucket website endpoint', () => {
                 done);
         });

-        it('should serve indexDocument if path request without key',
-        done => {
+        test('should serve indexDocument if path request without key', done => {
             WebsiteConfigTester.makeHeadRequest(undefined,
                 `${endpoint}/pathprefix/`, 200, indexExpectedHeaders, done);
         });

-        it('should serve indexDocument if path request with key',
-        done => {
+        test('should serve indexDocument if path request with key', done => {
             WebsiteConfigTester.makeHeadRequest(undefined,
                 `${endpoint}/pathprefix/index.html`, 200,
                 indexExpectedHeaders, done);
@@ -188,8 +184,7 @@ describe('Head request on bucket website endpoint', () => {
         const webConfig = new WebsiteConfigTester('index.html');
         s3.putBucketWebsite({ Bucket: bucket,
             WebsiteConfiguration: webConfig }, err => {
-            assert.strictEqual(err,
-                null, `Found unexpected err ${err}`);
+            expect(err).toBe(null);
             s3.putObject({ Bucket: bucket,
                 Key: 'index.html',
                 ACL: 'private',
@@ -203,7 +198,7 @@ describe('Head request on bucket website endpoint', () => {
             s3.deleteObject({ Bucket: bucket, Key: 'index.html' }, done);
         });

-        it('should return 403 if key is private', done => {
+        test('should return 403 if key is private', done => {
             const expectedHeaders = {
                 'x-amz-error-code': 'AccessDenied',
                 'x-amz-error-message': 'Access Denied',
@@ -220,7 +215,7 @@ describe('Head request on bucket website endpoint', () => {
                 WebsiteConfiguration: webConfig }, done);
         });

-        it('should return 403 if nonexisting index document key', done => {
+        test('should return 403 if nonexisting index document key', done => {
             const expectedHeaders = {
                 'x-amz-error-code': 'AccessDenied',
                 'x-amz-error-message': 'Access Denied',
@@ -241,7 +236,7 @@ describe('Head request on bucket website endpoint', () => {
                 WebsiteConfiguration: webConfig }, done);
         });

-        it(`should redirect to ${redirectEndpoint}`, done => {
+        test(`should redirect to ${redirectEndpoint}`, done => {
             const expectedHeaders = {
                 location: redirectEndpoint,
             };
@@ -249,7 +244,7 @@ describe('Head request on bucket website endpoint', () => {
                 endpoint, 301, expectedHeaders, done);
         });

-        it(`should redirect to ${redirectEndpoint}about`, done => {
+        test(`should redirect to ${redirectEndpoint}about`, done => {
             const expectedHeaders = {
                 location: `${redirectEndpoint}about/`,
             };
@@ -274,7 +269,7 @@ describe('Head request on bucket website endpoint', () => {
                 WebsiteConfiguration: webConfig }, done);
         });

-        it('should redirect to https://google.com', done => {
+        test('should redirect to https://google.com', done => {
             const expectedHeaders = {
                 location: 'https://www.google.com/',
             };
@@ -282,7 +277,7 @@ describe('Head request on bucket website endpoint', () => {
                 301, expectedHeaders, done);
         });

-        it('should redirect to https://google.com/about', done => {
+        test('should redirect to https://google.com/about', done => {
             const expectedHeaders = {
                 location: 'https://www.google.com/about/',
             };
@@ -297,8 +292,7 @@ describe('Head request on bucket website endpoint', () => {
                 'error.html');
             s3.putBucketWebsite({ Bucket: bucket,
                 WebsiteConfiguration: webConfig }, err => {
-                assert.strictEqual(err,
-                    null, `Found unexpected err ${err}`);
+                expect(err).toBe(null);
                 s3.putObject({ Bucket: bucket,
                     Key: 'error.html',
                     ACL: 'public-read',
@@ -312,7 +306,7 @@ describe('Head request on bucket website endpoint', () => {
             s3.deleteObject({ Bucket: bucket, Key: 'error.html' }, done);
         });

-        it('should return regular error headers regardless of whether ' +
+        test('should return regular error headers regardless of whether ' +
         'custom error document', done => {
             const expectedHeaders = {
                 'x-amz-error-code': 'AccessDenied',
@@ -337,7 +331,7 @@ describe('Head request on bucket website endpoint', () => {
                 WebsiteConfiguration: webConfig }, done);
         });

-        it(`should redirect to ${redirectEndpoint} if error 403` +
+        test(`should redirect to ${redirectEndpoint} if error 403` +
         ' occured', done => {
             const expectedHeaders = {
                 location: redirectEndpoint,
@@ -361,7 +355,7 @@ describe('Head request on bucket website endpoint', () => {
                 WebsiteConfiguration: webConfig }, done);
         });

-        it(`should redirect to ${redirectEndpoint}about if ` +
+        test(`should redirect to ${redirectEndpoint}about if ` +
         'key prefix is equal to "about"', done => {
             const expectedHeaders = {
                 location: `${redirectEndpoint}about/`,
@@ -387,7 +381,7 @@ describe('Head request on bucket website endpoint', () => {
                 WebsiteConfiguration: webConfig }, done);
         });

-        it(`should redirect to ${redirectEndpoint} if ` +
+        test(`should redirect to ${redirectEndpoint} if ` +
         'key prefix is equal to "about" AND error code 403', done => {
             const expectedHeaders = {
                 location: `${redirectEndpoint}about/`,
@@ -415,7 +409,7 @@ describe('Head request on bucket website endpoint', () => {
                 WebsiteConfiguration: webConfig }, done);
         });

-        it('should redirect based on first rule', done => {
+        test('should redirect based on first rule', done => {
             const expectedHeaders = {
                 location: `${redirectEndpoint}about/`,
             };
@@ -440,7 +434,7 @@ describe('Head request on bucket website endpoint', () => {
                 WebsiteConfiguration: webConfig }, done);
         });

-        it('should redirect to https://www.google.com/about if ' +
+        test('should redirect to https://www.google.com/about if ' +
         'https protocol specified', done => {
             const expectedHeaders = {
                 location: 'https://www.google.com/about/',
@@ -469,7 +463,7 @@ describe('Head request on bucket website endpoint', () => {
                 err => done(err));
         });

-        it('should redirect to specified file if 403 error ' +
+        test('should redirect to specified file if 403 error ' +
         'error occured', done => {
             const expectedHeaders = {
                 location: `${endpoint}/redirect.html`,
@@ -494,7 +488,7 @@ describe('Head request on bucket website endpoint', () => {
                 WebsiteConfiguration: webConfig }, done);
         });

-        it(`should redirect to ${redirectEndpoint}about if ` +
+        test(`should redirect to ${redirectEndpoint}about if ` +
         'ReplaceKeyPrefixWith equals "about"', done => {
             const expectedHeaders = {
                 location: `${redirectEndpoint}about`,
@@ -524,7 +518,7 @@ describe('Head request on bucket website endpoint', () => {
                 err => done(err));
         });

-        it('should redirect to "redirect/" object if key prefix is equal ' +
+        test('should redirect to "redirect/" object if key prefix is equal ' +
         'to "about/"', done => {
             const expectedHeaders = {
                 location: `${endpoint}/redirect/`,
@@ -555,10 +549,9 @@ describe('Head request on bucket website endpoint', () => {
                 err => done(err));
         });

-        it('should redirect to "redirect" object if key prefix is equal ' +
+        test('should redirect to "redirect" object if key prefix is equal ' +
         'to "about/" and there is a 403 error satisfying the ' +
-        'condition in the redirect rule',
-        done => {
+        'condition in the redirect rule', done => {
             const expectedHeaders = {
                 location: `${endpoint}/redirect/`,
             };
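A final note on the done-callback style kept throughout these files: when an `expect` inside an async callback fails, it throws inside that callback, `done()` is never reached, and the test can die with a timeout instead of a readable assertion failure. The guard recommended by the Jest docs is to wrap the assertion and forward the error to `done`; a sketch with a stand-in async op (the helper name is invented for illustration):

// Stand-in for an async s3 call; invokes its callback with no error.
function fakeAsyncOp(cb) {
    process.nextTick(() => cb(null));
}

test('should put the website configuration', done => {
    fakeAsyncOp(err => {
        try {
            expect(err).toBe(null); // a failure here throws...
            done();                 // ...so done() would be skipped
        } catch (e) {
            done(e);                // forward it for a clean failure report
        }
    });
});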
Some files were not shown because too many files have changed in this diff.