Compare commits

...

13 Commits

Author SHA1 Message Date
alexandre merle c371293dd2 wip 2021-02-08 07:12:11 +01:00
alexandre merle 42f3eedffc to-remove 2021-02-07 05:39:57 +01:00
alexandre merle 721ab3c882 tests: skip retention / object lock / legal hold for ceph 2021-02-07 05:39:11 +01:00
alexandre merle 151c8137c5 testfix: fix versioning state change if replication 2021-02-06 07:11:41 +01:00
alexandre merle b174d6a971 bugfix: init metadata wrapper for tests 2021-02-06 07:11:41 +01:00
alexandre merle f86d0b5f8b bugfix: use MPU testing for multiple backend 2021-02-06 06:53:44 +01:00
alexandre merle f50ddcd265 fix sanity check on complete upload 2021-02-06 06:53:44 +01:00
alexandre merle 21f0a1ee6e bugfix: considering range start 0 as valid 2021-02-06 06:53:44 +01:00
alexandre merle fc3e7787ed tests: activate aws debug logging 2021-02-06 06:53:44 +01:00
alexandre merle 33ce1a18cc bugfix: ZENKO-2352: fix aws tests 2021-02-06 06:53:44 +01:00
alexandre merle c85c0c4e3f improv: use agentkeepalive for global agent 2021-02-06 06:53:43 +01:00
alexandre merle 07f7825aa2 fix aws sdk upgrade 2021-02-06 06:48:09 +01:00
alexandre merle c58f42d9eb Remove .only() 2021-02-06 06:48:09 +01:00
34 changed files with 177 additions and 83 deletions

View File

@@ -66,6 +66,7 @@ models:
   - env: &multiple-backend-vars
       S3BACKEND: "mem"
       S3DATA: "multiple"
+      MPU_TESTING: "yes"
   - env: &file-mem-mpu
       S3BACKEND: "file"
       S3VAULT: "mem"

View File

@@ -17,8 +17,13 @@ function locationKeysSanityCheck(prev, curr) {
         return curr.every(v => v.key !== prev);
     }
     const keysMap = {};
-    prev.forEach(v => { keysMap[v.key] = true; });
-    return curr.every(v => !keysMap[v.key]);
+    prev.forEach(v => {
+        if (!keysMap[v.dataStoreType]) {
+            keysMap[v.dataStoreType] = {};
+        }
+        keysMap[v.dataStoreType][v.key] = true;
+    });
+    return curr.every(v => !(keysMap[v.dataStoreType] && keysMap[v.dataStoreType][v.key]));
 }

 module.exports = locationKeysSanityCheck;
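
The sanity check is now namespaced by dataStoreType, so two backends that happen to reuse the same key string no longer trip a false positive. A minimal illustration of the intended behaviour (the require path and location entries are hypothetical, shown only to make the before/after difference concrete):

    // Hypothetical path; require the helper from wherever the test utils live.
    const locationKeysSanityCheck = require('./locationKeysSanityCheck');

    const prev = [{ key: 'abc', dataStoreType: 'aws_s3' }];
    const curr = [{ key: 'abc', dataStoreType: 'azure' }];

    // Old version (map keyed on `key` alone): flags a collision and returns false.
    // New version (map keyed per dataStoreType): returns true, since the backends differ.
    locationKeysSanityCheck(prev, curr);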

View File

@@ -130,7 +130,7 @@ function objectGet(authInfo, request, returnTagCount, log, callback) {
                     range[1] - range[0] + 1;
                 responseMetaHeaders['Content-Range'] =
                     `bytes ${range[0]}-${range[1]}/${objLength}`;
-                streamingParams.rangeStart = range[0] ?
+                streamingParams.rangeStart = (range[0] || typeof range[0] === 'number') ?
                     range[0].toString() : undefined;
                 streamingParams.rangeEnd = range[1] ?
                     range[1].toString() : undefined;
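
The old ternary treated a range start of 0 as falsy, so a request like `bytes=0-9` silently lost its start offset; the added `typeof` check keeps 0 while still mapping a missing start to undefined. A standalone sketch of the difference (not CloudServer code, just the expression in isolation):

    const range = [0, 9];

    // Old expression: 0 is falsy, so the start offset is dropped.
    const oldStart = range[0] ? range[0].toString() : undefined;          // undefined

    // New expression: an explicit number check preserves the zero offset.
    const newStart = (range[0] || typeof range[0] === 'number') ?
        range[0].toString() : undefined;                                  // '0'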

View File

@@ -24,6 +24,7 @@ const {
     isManagementAgentUsed,
 } = require('./management/agentClient');

+const HttpAgent = require('agentkeepalive');
 const routes = arsenal.s3routes.routes;
 const { parseLC, MultipleBackendGateway } = arsenal.storage.data;
 const websiteEndpoints = _config.websiteEndpoints;
@@ -65,7 +66,10 @@ class S3Server {
     constructor(worker) {
         this.worker = worker;
         this.cluster = true;
-        http.globalAgent.keepAlive = true;
+        http.globalAgent = new HttpAgent({
+            keepAlive: true,
+            freeSocketTimeout: arsenal.constants.httpClientFreeSocketTimeout,
+        });

         process.on('SIGINT', this.cleanUp.bind(this));
         process.on('SIGHUP', this.cleanUp.bind(this));
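
Swapping the flag flip on `http.globalAgent` for an `agentkeepalive` Agent keeps pooled sockets alive between requests and lets them expire before the peer's idle timeout can close them mid-reuse. A minimal sketch of the same idea in isolation (the 8000 ms value is an arbitrary example; the diff itself uses arsenal.constants.httpClientFreeSocketTimeout):

    const http = require('http');
    const HttpAgent = require('agentkeepalive');

    // Reuse sockets across requests, but recycle idle ones early enough
    // that the remote end has not already closed them.
    http.globalAgent = new HttpAgent({
        keepAlive: true,
        freeSocketTimeout: 8000, // example value
    });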

View File

@@ -20,7 +20,8 @@
   "homepage": "https://github.com/scality/S3#readme",
   "dependencies": {
     "@hapi/joi": "^17.1.0",
-    "arsenal": "github:scality/Arsenal#372df63",
+    "agentkeepalive": "^4.1.3",
+    "arsenal": "github:scality/Arsenal#bugfix/ZENKO-2153-fix-tests",
     "async": "~2.5.0",
     "aws-sdk": "2.831.0",
     "azure-storage": "^2.1.0",

View File

@@ -1,4 +1,6 @@
 const bluebird = require('bluebird');
+const AWS = require('aws-sdk');
+AWS.config.logger = console;
 const { S3 } = require('aws-sdk');
 const projectFixture = require('../fixtures/project');
 const getConfig = require('../../test/support/config');
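
Assigning `AWS.config.logger` is the aws-sdk v2 switch behind the "activate aws debug logging" commit: once set, the SDK prints every call it makes (service, operation, parameters, timing, retries). A minimal sketch:

    const AWS = require('aws-sdk');

    // Echo every SDK request to stdout for easier CI debugging.
    AWS.config.logger = console;

    const s3 = new AWS.S3();
    s3.listBuckets((err, data) => {
        // The listBuckets call above is logged by the SDK before this runs.
    });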

View File

@@ -302,7 +302,7 @@ describe('PUT Bucket - AWS.S3.createBucket', () => {
                     assert.ifError(err);
                     assert.strictEqual(res.Location, `/${bucketName}`);
                     return next();
-                }).promise(),
+                }),
             next => bucketUtil.s3.getBucketLocation(
                 {
                     Bucket: bucketName,
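
With aws-sdk v2 a call is driven either by passing a callback or by omitting it and calling .promise() on the returned request; the removed `.promise()` was mixing the two styles on one request, which is why it is dropped here. A sketch of the two valid shapes (bucketName is assumed to be defined as in the test):

    // Callback style: the request is sent because a callback was supplied.
    s3.createBucket({ Bucket: bucketName }, (err, res) => { /* ... */ });

    // Promise style: no callback, the request is sent by .promise().
    s3.createBucket({ Bucket: bucketName }).promise()
        .then(res => { /* ... */ });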

View File

@@ -30,7 +30,7 @@ describeSkipIfNotMultiple('Multiple backend delete', () => {
             process.stdout.write('Creating bucket\n');
             bucketUtil = new BucketUtility('default', sigCfg);
             s3 = bucketUtil.s3;
-            return s3.createBucketPromsie({ Bucket: bucket })
+            return s3.createBucket({ Bucket: bucket }).promise()
             .catch(err => {
                 process.stdout.write(`Error creating bucket: ${err}\n`);
                 throw err;
@@ -154,5 +154,42 @@ describeSkipIfNotMultiple('Multiple backend delete', () => {
                 });
             });
         });
+        it('should delete object while mpu in progress', () => {
+            let uploadId = null;
+            return s3.putObject({
+                Bucket: bucket,
+                Key: fileObject,
+                Body: body,
+                Metadata: {
+                    'scal-location-constraint': fileLocation,
+                },
+            }).promise().then(() => { // eslint-disable-line arrow-body-style
+                return s3.createMultipartUpload({
+                    Bucket: bucket,
+                    Key: fileObject,
+                }).promise();
+            }).then(res => {
+                uploadId = res.UploadId;
+                return s3.deleteObject({
+                    Bucket: bucket,
+                    Key: fileObject,
+                }).promise();
+            }).then(() => { // eslint-disable-line arrow-body-style
+                return s3.abortMultipartUpload({
+                    Bucket: bucket,
+                    Key: fileObject,
+                    UploadId: uploadId,
+                }).promise();
+            }).then(() => { // eslint-disable-line arrow-body-style
+                return s3.getObject({
+                    Bucket: bucket,
+                    Key: fileObject,
+                }).promise().catch(err => {
+                    if (err.code !== 'NoSuchKey') {
+                        throw err;
+                    }
+                });
+            });
+        });
     });
 });

View File

@@ -55,7 +55,7 @@ function _assertDeleteResult(result, resultType, requestVersionId) {
             `did not expect version id in result, got "${result.VersionId}"`);
     }
     if (expectDeleteMarker) {
-        assert.strictEqual(result.DeleteMarker, 'true');
+        assert.strictEqual(result.DeleteMarker, true);
     } else {
         assert.strictEqual(result.DeleteMarker, undefined);
     }

View File

@@ -119,7 +119,7 @@ describe('Multiple backend get object', function testSuite() {
                 }, (err, res) => {
                     assert.equal(err, null, 'Expected success but got ' +
                         `error ${err}`);
-                    assert.strictEqual(res.ContentLength, '10');
+                    assert.strictEqual(res.ContentLength, 10);
                     assert.strictEqual(res.Body.toString(), 'helloworld');
                     assert.deepStrictEqual(res.Metadata,
                         { 'scal-location-constraint': awsLocation });
@@ -171,7 +171,7 @@ describe('Multiple backend get object', function testSuite() {
                 }, (err, res) => {
                     assert.equal(err, null, 'Expected success but got ' +
                         `error ${err}`);
-                    assert.strictEqual(res.ContentLength, '10');
+                    assert.strictEqual(res.ContentLength, 10);
                     assert.strictEqual(res.Body.toString(), 'helloworld');
                     assert.deepStrictEqual(res.Metadata,
                         { 'scal-location-constraint': awsLocationMismatch });
@@ -293,7 +293,7 @@ describe('Multiple backend get object', function testSuite() {
                 (err, res) => {
                     assert.equal(err, null, 'Expected success but got ' +
                         `error ${err}`);
-                    assert.strictEqual(res.ContentLength, '10');
+                    assert.strictEqual(res.ContentLength, 10);
                     assert.strictEqual(res.ContentRange,
                         `bytes 0-9/${bigBodyLen}`);
                     assert.strictEqual(res.ETag, `"${bigMD5}"`);

View File

@@ -304,7 +304,7 @@ function testSuite() {
             ], done);
         });

-        it('should return a ServiceUnavailable if trying to get an object ' +
+        it('should return a LocationNotFound if trying to get an object ' +
         'that was deleted in AWS but exists in s3 metadata',
         done => {
             const key = `somekey-${genUniqID()}`;
@@ -321,14 +321,14 @@ function testSuite() {
                     err => next(err, s3VerId)),
                 (s3VerId, next) => s3.getObject({ Bucket: bucket, Key: key },
                     err => {
-                        assert.strictEqual(err.code, 'ServiceUnavailable');
-                        assert.strictEqual(err.statusCode, 503);
+                        assert.strictEqual(err.code, 'LocationNotFound');
+                        assert.strictEqual(err.statusCode, 424);
                         next();
                     }),
             ], done);
         });

-        it('should return a ServiceUnavailable if trying to get a version ' +
+        it('should return a LocationNotFound if trying to get a version ' +
         'that was deleted in AWS but exists in s3 metadata',
         done => {
             const key = `somekey-${genUniqID()}`;
@@ -345,8 +345,8 @@ function testSuite() {
                     err => next(err, s3VerId)),
                 (s3VerId, next) => s3.getObject({ Bucket: bucket, Key: key,
                     VersionId: s3VerId }, err => {
-                        assert.strictEqual(err.code, 'ServiceUnavailable');
-                        assert.strictEqual(err.statusCode, 503);
+                        assert.strictEqual(err.code, 'LocationNotFound');
+                        assert.strictEqual(err.statusCode, 424);
                         next();
                     }),
             ], done);

View File

@@ -154,7 +154,7 @@ function testSuite() {
                 Bucket: azureContainerName,
                 Key: azureObject,
             }, err => {
-                assert.strictEqual(err.code, 'ServiceUnavailable');
+                assert.strictEqual(err.code, 'LocationNotFound');
                 done();
             });
         });

View File

@@ -113,7 +113,7 @@ describe('Multiple backend get object', function testSuite() {
                 assert.equal(err, null,
                     `Expected success but got error ${err}`);
                 if (range) {
-                    assert.strictEqual(res.ContentLength, `${size}`);
+                    assert.strictEqual(res.ContentLength, size);
                     assert.strictEqual(res.ContentRange, contentRange);
                 }
                 assert.strictEqual(res.ETag, `"${MD5}"`);

View File

@@ -61,11 +61,14 @@ function mpuSetup(key, location, cb) {
                 Metadata: { 'scal-location-constraint': location },
             };
             s3.createMultipartUpload(params, (err, res) => {
+                if (err) {
+                    return next(err);
+                }
                 const uploadId = res.UploadId;
                 assert(uploadId);
                 assert.strictEqual(res.Bucket, azureContainerName);
                 assert.strictEqual(res.Key, key);
-                next(err, uploadId);
+                return next(null, uploadId);
             });
         },
         (uploadId, next) => {
@@ -77,8 +80,11 @@ function mpuSetup(key, location, cb) {
                 Body: smallBody,
             };
             s3.uploadPart(partParams, (err, res) => {
+                if (err) {
+                    return next(err);
+                }
                 partArray.push({ ETag: res.ETag, PartNumber: 1 });
-                next(err, uploadId);
+                return next(null, uploadId);
             });
         },
         (uploadId, next) => {
@@ -90,8 +96,11 @@ function mpuSetup(key, location, cb) {
                 Body: bigBody,
             };
             s3.uploadPart(partParams, (err, res) => {
+                if (err) {
+                    return next(err);
+                }
                 partArray.push({ ETag: res.ETag, PartNumber: 2 });
-                next(err, uploadId);
+                return next(null, uploadId);
             });
         },
     ], (err, uploadId) => {
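
Each waterfall step now bails out on `err` before touching `res`, so an SDK failure surfaces as the waterfall's error instead of a TypeError on `res.UploadId` or `res.ETag`. The pattern, reduced to one step (bucket and key names are illustrative):

    const async = require('async');
    const AWS = require('aws-sdk');

    const s3 = new AWS.S3();
    const params = { Bucket: 'example-bucket', Key: 'example-key' };

    async.waterfall([
        next => s3.createMultipartUpload(params, (err, res) => {
            if (err) {
                return next(err);            // propagate the real S3 error
            }
            return next(null, res.UploadId); // only dereference res on success
        }),
        // ...further steps receive uploadId...
    ], (err, uploadId) => { /* single place to handle failure */ });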

View File

@@ -11,7 +11,7 @@ const bucket = `completempugcp${genUniqID()}`;
 const smallBody = Buffer.from('I am a body', 'utf8');
 const bigBody = Buffer.alloc(10485760);
 const s3MD5 = 'bfb875032e51cbe2a60c5b6b99a2153f-2';
-const expectedContentLength = '10485771';
+const expectedContentLength = 10485771;
 const gcpTimeout = 5000;

 let s3;

View File

@@ -31,6 +31,8 @@ const bigMD5 = '5f363e0e58a95f06cbe9bbc662c5dfb6';
 const emptyMD5 = 'd41d8cd98f00b204e9800998ecf8427e';
 const locMetaHeader = constants.objectLocationConstraintHeader.substring(11);

+const Promise = require('bluebird');
+
 const azureTimeout = 40000;

 let bucketUtil;

View File

@@ -19,6 +19,7 @@ const body = Buffer.from('I am a body', 'utf8');
 const correctMD5 = 'be747eb4b75517bf6b3cf7c5fbb62f3a';
 const emptyMD5 = 'd41d8cd98f00b204e9800998ecf8427e';
 const locMetaHeader = constants.objectLocationConstraintHeader.substring(11);
+const Promise = require('bluebird');

 let bucketUtil;
 let s3;

View File

@@ -70,7 +70,7 @@ function awsGet(key, tagCheck, isEmpty, isMpu, callback) {
             assert.strictEqual(res.ETag, `"${correctMD5}"`);
         }
         if (tagCheck) {
-            assert.strictEqual(res.TagCount, '2');
+            assert.strictEqual(res.TagCount, 2);
         } else {
             assert.strictEqual(res.TagCount, undefined);
         }
@@ -114,7 +114,7 @@ function getObject(key, backend, tagCheck, isEmpty, isMpu, callback) {
         assert.strictEqual(res.Metadata['scal-location-constraint'],
             backend);
         if (tagCheck) {
-            assert.strictEqual(res.TagCount, '2');
+            assert.strictEqual(res.TagCount, 2);
         } else {
             assert.strictEqual(res.TagCount, undefined);
         }

View File

@@ -74,7 +74,7 @@ function awsGetCheck(objectKey, s3MD5, awsMD5, location, cb) {
     });
 }

-describe.only('MultipleBackend put object', function testSuite() {
+describe('MultipleBackend put object', function testSuite() {
     this.timeout(250000);
     withV4(sigCfg => {
         beforeEach(() => {

View File

@@ -2,6 +2,7 @@ const assert = require('assert');
 const crypto = require('crypto');
 const { errors, storage } = require('arsenal');
 const AWS = require('aws-sdk');
+AWS.config.logger = console;
 const uuid = require('uuid/v4');
 const async = require('async');

@@ -279,7 +280,7 @@ utils.getAndAssertResult = (s3, params, cb) => {
         if (expectedTagCount && expectedTagCount === '0') {
             assert.strictEqual(data.TagCount, undefined);
         } else if (expectedTagCount) {
-            assert.strictEqual(data.TagCount, expectedTagCount);
+            assert.strictEqual(data.TagCount, parseInt(expectedTagCount, 10));
         }
         return cb();
     });

View File

@@ -5,7 +5,7 @@ const BucketUtility = require('../../../lib/utility/bucket-util');
 const { makeTagQuery, updateRequestContexts } =
     require('../../../../../../lib/api/apiUtils/authorization/tagConditionKeys');
 const { DummyRequestLogger, TaggingConfigTester, createRequestContext } = require('../../../../../unit/helpers');
-
+const { initMetadata } = require('../../utils/init');
 const taggingUtil = new TaggingConfigTester();
 const log = new DummyRequestLogger();
 const bucket = 'bucket2testconditionkeys';
@@ -14,11 +14,16 @@ const objPutTaggingReq = taggingUtil
     .createObjectTaggingRequest('PUT', bucket, object);
 const requestContexts = [createRequestContext('objectPutTagging', objPutTaggingReq)];

-describe('Tag condition keys updateRequestContext', () => {
+const isCEPH = process.env.CI_CEPH !== undefined;
+const describeSkipIfCeph = isCEPH ? describe.skip : describe;
+
+describeSkipIfCeph('Tag condition keys updateRequestContext', () => {
     withV4(sigCfg => {
         let bucketUtil;
         let s3;

+        before(done => initMetadata(done));
+
         beforeEach(() => {
             bucketUtil = new BucketUtility('default', sigCfg);
             s3 = bucketUtil.s3;
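
The conditional describe used here (and in the object-lock suites below) is the usual Mocha idiom for gating a whole suite on an environment flag: `describe.skip` still registers the tests but marks them pending, so Ceph CI runs report them instead of failing them. A reduced sketch of the pattern:

    // Skip suites covering features the Ceph backend does not support.
    const isCEPH = process.env.CI_CEPH !== undefined;
    const describeSkipIfCeph = isCEPH ? describe.skip : describe;

    describeSkipIfCeph('feature not supported on Ceph', () => {
        it('runs normally elsewhere, shows as pending on Ceph CI', () => {
            // ...assertions exercised only on non-Ceph backends...
        });
    });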

View File

@@ -9,6 +9,9 @@ const bucketName = 'testdeletempu';
 const objectName = 'key';
 const objectNameTwo = 'secondkey';

+const isCEPH = process.env.CI_CEPH !== undefined;
+const describeSkipIfCeph = isCEPH ? describe.skip : describe;
+
 describe('DELETE object', () => {
     withV4(sigCfg => {
         let uploadId;
@@ -95,7 +98,7 @@ describe('DELETE object', () => {
             });
         });

-        describe('with object lock', () => {
+        describeSkipIfCeph('with object lock', () => {
             let versionIdOne;
             let versionIdTwo;
             const retainDate = moment().add(10, 'days').toISOString();

View File

@@ -1050,7 +1050,10 @@ describe('GET object', () => {
     });
 });

-describe('GET object with object lock', () => {
+const isCEPH = process.env.CI_CEPH !== undefined;
+const describeSkipIfCeph = isCEPH ? describe.skip : describe;
+
+describeSkipIfCeph('GET object with object lock', () => {
     withV4(sigCfg => {
         const bucketUtil = new BucketUtility('default', sigCfg);
         const s3 = bucketUtil.s3;

View File

@@ -13,7 +13,10 @@ const unlockedBucket = 'mock-bucket-no-lock';
 const key = 'mock-object-legalhold';
 const keyNoHold = 'mock-object-no-legalhold';

-describe('GET object legal hold', () => {
+const isCEPH = process.env.CI_CEPH !== undefined;
+const describeSkipIfCeph = isCEPH ? describe.skip : describe;
+
+describeSkipIfCeph('GET object legal hold', () => {
     withV4(sigCfg => {
         const bucketUtil = new BucketUtility('default', sigCfg);
         const s3 = bucketUtil.s3;

View File

@@ -33,7 +33,10 @@ const expectedConfig = {
     RetainUntilDate: manipulateDate(),
 };

-describe('GET object retention', () => {
+const isCEPH = process.env.CI_CEPH !== undefined;
+const describeSkipIfCeph = isCEPH ? describe.skip : describe;
+
+describeSkipIfCeph('GET object retention', () => {
     withV4(sigCfg => {
         const bucketUtil = new BucketUtility('default', sigCfg);
         const s3 = bucketUtil.s3;

View File

@@ -278,7 +278,10 @@ describe('Multi-Object Delete Access', function access() {
     });
 });

-describe('Multi-Object Delete with Object Lock', () => {
+const isCEPH = process.env.CI_CEPH !== undefined;
+const describeSkipIfCeph = isCEPH ? describe.skip : describe;
+
+describeSkipIfCeph('Multi-Object Delete with Object Lock', () => {
     let bucketUtil;
     let s3;
     const versionIds = [];

View File

@@ -1231,7 +1231,10 @@ describe('Object Copy', () => {
     });
 });

-describe('Object Copy with object lock enabled on both destination ' +
+const isCEPH = process.env.CI_CEPH !== undefined;
+const describeSkipIfCeph = isCEPH ? describe.skip : describe;
+
+describeSkipIfCeph('Object Copy with object lock enabled on both destination ' +
 'bucket and source bucket', () => {
     withV4(sigCfg => {
         let bucketUtil;

View File

@@ -535,7 +535,10 @@ describe('HEAD object, conditions', () => {
     });
 });

-describe('HEAD object with object lock', () => {
+const isCEPH = process.env.CI_CEPH !== undefined;
+const describeSkipIfCeph = isCEPH ? describe.skip : describe;
+
+describeSkipIfCeph('HEAD object with object lock', () => {
     withV4(sigCfg => {
         const bucketUtil = new BucketUtility('default', sigCfg);
         const s3 = bucketUtil.s3;

View File

@@ -270,7 +270,10 @@ describe('PUT object', () => {
     });
 });

-describe('PUT object with object lock', () => {
+const isCEPH = process.env.CI_CEPH !== undefined;
+const describeSkipIfCeph = isCEPH ? describe.skip : describe;
+
+describeSkipIfCeph('PUT object with object lock', () => {
     withV4(sigCfg => {
         let bucketUtil;
         let s3;

View File

@@ -31,7 +31,10 @@ function createLegalHoldParams(bucket, key, status) {
     };
 }

-describe('PUT object legal hold', () => {
+const isCEPH = process.env.CI_CEPH !== undefined;
+const describeSkipIfCeph = isCEPH ? describe.skip : describe;
+
+describeSkipIfCeph('PUT object legal hold', () => {
     withV4(sigCfg => {
         const bucketUtil = new BucketUtility('default', sigCfg);
         const s3 = bucketUtil.s3;

View File

@@ -15,7 +15,10 @@ const retentionConfig = {
     RetainUntilDate: moment().add(1, 'Days').toISOString(),
 };

-describe('PUT object retention', () => {
+const isCEPH = process.env.CI_CEPH !== undefined;
+const describeSkipIfCeph = isCEPH ? describe.skip : describe;
+
+describeSkipIfCeph('PUT object retention', () => {
     withV4(sigCfg => {
         const bucketUtil = new BucketUtility('default', sigCfg);
         const s3 = bucketUtil.s3;

View File

@@ -0,0 +1,19 @@
+const metadata = require('../../../../../lib/metadata/wrapper');
+let metadataInit = false;
+
+function initMetadata(done) {
+    if (metadataInit === true) {
+        return done();
+    }
+    return metadata.setup(err => {
+        if (err) {
+            return done(err);
+        }
+        metadataInit = true;
+        return done();
+    });
+}
+
+module.exports = {
+    initMetadata,
+};
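
The helper memoizes `metadata.setup()` so any number of test files can call it from a `before` hook without re-initializing the wrapper. Typical usage, mirroring the tag-condition-keys test above (sketch only):

    const { initMetadata } = require('../../utils/init');

    describe('suite that touches the metadata wrapper directly', () => {
        // First caller pays the setup cost; later calls return immediately.
        before(done => initMetadata(done));

        it('can use the metadata wrapper once setup has run', done => {
            // ...test body...
            done();
        });
    });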

View File

@@ -16,7 +16,7 @@ function checkNoError(err) {
     assert.ifError(err, `Expected success, got error ${JSON.stringify(err)}`);
 }

-function testVersioning(s3, versioningStatus, replicationStatus, cb) {
+function testVersioning(s3, versioningStatus, replicationStatus, removeReplication, cb) {
     const versioningParams = { Bucket: bucketName,
         VersioningConfiguration: { Status: versioningStatus } };
     const replicationParams = {
@@ -38,6 +38,12 @@ function testVersioning(s3, versioningStatus, replicationStatus, cb) {
     };
     async.waterfall([
         cb => s3.putBucketReplication(replicationParams, e => cb(e)),
+        cb => {
+            if (removeReplication) {
+                return s3.deleteBucketReplication({ Bucket: bucketName }, e => cb(e));
+            }
+            return process.nextTick(() => cb());
+        },
         cb => s3.putBucketVersioning(versioningParams, e => cb(e)),
     ], cb);
 }
@@ -63,15 +69,22 @@ describe('Versioning on a replication source bucket', () => {

         it('should not be able to disable versioning if replication enabled',
         done => {
-            testVersioning(s3, 'Suspended', 'Enabled', err => {
+            testVersioning(s3, 'Suspended', 'Enabled', false, err => {
                 checkError(err, 'InvalidBucketState');
                 done();
             });
         });

-        it('should be able to disable versioning if replication disabled',
+        it('should not be able to disable versioning if replication disabled',
         done => {
-            testVersioning(s3, 'Suspended', 'Disabled', err => {
+            testVersioning(s3, 'Suspended', 'Disabled', false, err => {
+                checkError(err, 'InvalidBucketState');
+                done();
+            });
+        });
+
+        it('should be able to disable versioning after removed replication', done => {
+            testVersioning(s3, 'Suspended', 'Disabled', true, err => {
                 checkNoError(err);
                 done();
             });
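
The new `removeReplication` flag lets the helper cover a third case: replication was configured and then deleted, after which suspending versioning must succeed. The three call shapes exercised by the updated tests (sketch; done callbacks omitted):

    // Replication still enabled: suspending versioning must fail.
    testVersioning(s3, 'Suspended', 'Enabled', false, err => checkError(err, 'InvalidBucketState'));

    // Replication configured but Disabled: still present, so it must still fail.
    testVersioning(s3, 'Suspended', 'Disabled', false, err => checkError(err, 'InvalidBucketState'));

    // Replication configuration deleted first: suspending versioning succeeds.
    testVersioning(s3, 'Suspended', 'Disabled', true, err => checkNoError(err));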

View File

@@ -657,9 +657,9 @@ arraybuffer.slice@~0.0.7:
   resolved "https://registry.yarnpkg.com/arraybuffer.slice/-/arraybuffer.slice-0.0.7.tgz#3bbc4275dd584cc1b10809b89d4e8b63a69e7675"
   integrity sha512-wGUIVQXuehL5TCqQun8OW81jGzAWycqzFF8lFp+GOM5BXLYj3bKNsYC4daB7n6XjCqxQA/qgTJ+8ANR3acjrog==

-"arsenal@github:scality/Arsenal#372df63":
+"arsenal@github:scality/Arsenal#bugfix/ZENKO-2153-fix-tests":
   version "8.2.1"
-  resolved "https://codeload.github.com/scality/Arsenal/tar.gz/372df634c4cf983f26b8a9439ecde90aedeb0645"
+  resolved "https://codeload.github.com/scality/Arsenal/tar.gz/b169bfc4566bc0df3701db41df78dc35a0b5d7d0"
   dependencies:
     "@hapi/joi" "^15.1.0"
     JSONStream "^1.0.0"
@@ -667,43 +667,7 @@ arraybuffer.slice@~0.0.7:
     ajv "6.12.2"
     async "~2.6.1"
     aws-sdk "2.80.0"
-    azure-storage "^2.1.0"
-    backo "^1.1.0"
-    bson "4.0.0"
-    debug "~4.1.0"
-    diskusage "^1.1.1"
-    fcntl "github:scality/node-fcntl"
-    hdclient scality/hdclient#5145e04e5ed33e85106765b1caa90cd245ef482b
-    https-proxy-agent "^2.2.0"
-    ioredis "4.9.5"
-    ipaddr.js "1.9.1"
-    level "~5.0.1"
-    level-sublevel "~6.6.5"
-    mongodb "^3.0.1"
-    node-forge "^0.7.1"
-    prom-client "10.2.3"
-    simple-glob "^0.2.0"
-    socket.io "~2.3.0"
-    socket.io-client "~2.3.0"
-    sproxydclient "github:scality/sproxydclient#30e7115"
-    utf8 "3.0.0"
-    uuid "^3.0.1"
-    werelogs scality/werelogs#0ff7ec82
-    xml2js "~0.4.23"
-  optionalDependencies:
-    ioctl "2.0.1"
-
-"arsenal@github:scality/Arsenal#5d100645aaa7083ce4195939bf968119e118b93a":
-  version "8.2.1"
-  resolved "https://codeload.github.com/scality/Arsenal/tar.gz/5d100645aaa7083ce4195939bf968119e118b93a"
-  dependencies:
-    "@hapi/joi" "^15.1.0"
-    JSONStream "^1.0.0"
-    agentkeepalive "^4.1.3"
-    ajv "6.12.2"
-    async "~2.6.1"
-    aws-sdk "2.80.0"
-    azure-storage "^2.1.0"
+    azure-storage "2.10.3"
     backo "^1.1.0"
     bson "4.0.0"
     debug "~4.1.0"
@@ -985,7 +949,7 @@ axios@^0.18.0:
     follow-redirects "1.5.10"
     is-buffer "^2.0.2"

-azure-storage@^2.1.0:
+azure-storage@2.10.3, azure-storage@^2.1.0:
   version "2.10.3"
   resolved "https://registry.yarnpkg.com/azure-storage/-/azure-storage-2.10.3.tgz#c5966bf929d87587d78f6847040ea9a4b1d4a50a"
   integrity sha512-IGLs5Xj6kO8Ii90KerQrrwuJKexLgSwYC4oLWmc11mzKe7Jt2E5IVg+ZQ8K53YWZACtVTMBNO3iGuA+4ipjJxQ==