Compare commits

...

4 Commits

Author            SHA1        Message                                   Date
alexandre merle   c3550dd09a  cache issue ?                             2021-01-25 20:34:26 +01:00
alexandre merle   f9230dfbe2  update arsenal                            2021-01-25 20:34:26 +01:00
alexandre merle   26018ec875  upgrade aws sdk                           2021-01-25 16:27:36 +01:00
alexandre merle   f18321e0d8  bugfix: send continue after auth checks   2021-01-25 16:27:36 +01:00
63 changed files with 497 additions and 527 deletions

View File

@ -84,7 +84,7 @@ models:
command: node -v
- ShellCommand: &yarn-install
name: install modules
command: yarn install --frozen-lockfile
command: yarn cache clean && yarn install --frozen-lockfile
haltOnFailure: true
- Upload: &upload-artifacts
source: /artifacts

View File

@ -166,12 +166,13 @@ const api = {
}
returnTagCount = checkedResults;
}
// issue 100 Continue to the client
writeContinue(request, response);
if (apiMethod === 'objectPut' || apiMethod === 'objectPutPart') {
request._response = response;
return this[apiMethod](userInfo, request, streamingV4Params,
log, callback, authorizationResults);
}
// issue 100 Continue to the client
writeContinue(request, response);
const MAX_POST_LENGTH = request.method.toUpperCase() === 'POST' ?
1024 * 1024 : 1024 * 1024 / 2; // 1 MB or 512 KB
const post = [];
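The hunk above stops issuing the 100 Continue before dispatch: for objectPut and objectPutPart the raw response is stashed on the request (`request._response = response`) and the continue is emitted later, once the handler's own checks have passed; other methods still get it immediately. A minimal sketch of the idea using Node's `checkContinue` event — the `authorize` helper and the pipe to stdout are stand-ins, not CloudServer's actual code:

```js
const http = require('http');

// Sketch only: defer the 100 Continue until authorization has passed, so an
// unauthorized client never starts streaming a body that would be thrown away.
function writeContinue(request, response) {
    if (request.headers.expect === '100-continue') {
        response.writeContinue();
    }
}

function authorize(request) {
    // stand-in for the real auth checks
    return request.headers.authorization !== undefined;
}

const server = http.createServer();

// With a 'checkContinue' listener, Node no longer answers Expect: 100-continue
// automatically; we decide when (and whether) to send it.
server.on('checkContinue', (request, response) => {
    if (!authorize(request)) {
        response.writeHead(403);
        return response.end();
    }
    // Auth passed: only now tell the client to send its body.
    writeContinue(request, response);
    request.pipe(process.stdout);      // stand-in for the storage backend
    request.on('end', () => {
        response.writeHead(200);
        response.end();
    });
    return undefined;
});

server.listen(8000);
```

With the continue deferred, a client that fails authorization never receives the go-ahead, so it does not stream a body that would only be discarded.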

View File

@ -12,6 +12,7 @@ const { validateHeaders } = require('./apiUtils/object/objectLockHelpers');
const kms = require('../kms/wrapper');
const checkObjectEncryption = require('./apiUtils/object/checkEncryption');
const writeContinue = require('../utilities/writeContinue');
const versionIdUtils = versioning.VersionID;
/**
@ -99,6 +100,7 @@ function objectPut(authInfo, request, streamingV4Params, log, callback) {
if (objectLockValidationError) {
return next(objectLockValidationError);
}
writeContinue(request, request._response);
return createAndStoreObject(bucketName,
bucket, objectKey, objMD, authInfo, canonicalID, cipherBundle,
request, false, streamingV4Params, log, next);

View File

@ -17,7 +17,7 @@ const { config } = require('../Config');
const multipleBackendGateway = require('../data/multipleBackendGateway');
const locationConstraintCheck
= require('./apiUtils/object/locationConstraintCheck');
const writeContinue = require('../utilities/writeContinue');
const skipError = new Error('skip');
// We pad the partNumbers so that the parts will be sorted in numerical order.
@ -114,6 +114,7 @@ function objectPutPart(authInfo, request, streamingV4Params, log,
log.debug('access denied for user on bucket', { requestType });
return next(errors.AccessDenied, destinationBucket);
}
writeContinue(request, request._response);
return next(null, destinationBucket);
},
// Get bucket server-side encryption, if it exists.
@ -200,6 +201,7 @@ function objectPutPart(authInfo, request, streamingV4Params, log,
// eslint-disable-next-line no-param-reassign
objectLocationConstraint = backendInfoObj.controllingLC;
}
return multipleBackendGateway.uploadPart(request,
streamingV4Params, null, size, objectLocationConstraint,
objectKey, uploadId, partNumber, bucketName, log,

View File

@ -20,9 +20,9 @@
"homepage": "https://github.com/scality/S3#readme",
"dependencies": {
"@hapi/joi": "^17.1.0",
"arsenal": "github:scality/Arsenal#26a00ba",
"arsenal": "github:scality/Arsenal#w/7.9/bugfix/S3C-2201-econnreset-rest-client-keep-alive",
"async": "~2.5.0",
"aws-sdk": "2.363.0",
"aws-sdk": "2.831.0",
"azure-storage": "^2.1.0",
"bucketclient": "scality/bucketclient#6d2d5a4",
"commander": "^2.9.0",
@ -51,8 +51,8 @@
"istanbul-api": "1.0.0-alpha.13",
"lolex": "^1.4.0",
"mocha": "^2.3.4",
"mocha-multi-reporters": "^1.1.7",
"mocha-junit-reporter": "^1.23.1",
"mocha-multi-reporters": "^1.1.7",
"node-mocks-http": "1.5.2",
"s3blaster": "scality/s3blaster#7a836b6",
"tv4": "^1.2.7"

View File

@ -1,4 +1,4 @@
const Promise = require('bluebird');
const bluebird = require('bluebird');
const { S3 } = require('aws-sdk');
const projectFixture = require('../fixtures/project');
const getConfig = require('../../test/support/config');
@ -7,20 +7,24 @@ class BucketUtility {
constructor(profile = 'default', config = {}) {
const s3Config = getConfig(profile, config);
this.s3 = Promise.promisifyAll(new S3(s3Config), { suffix: 'Promise' });
this.s3 = new S3(s3Config);
this.s3.config.setPromisesDependency(bluebird);
this.s3.config.update({
maxRetries: 0,
});
}
createOne(bucketName) {
return this.s3
.createBucketPromise({ Bucket: bucketName })
.createBucket({ Bucket: bucketName }).promise()
.then(() => bucketName);
}
createOneWithLock(bucketName) {
return this.s3.createBucketPromise({
return this.s3.createBucket({
Bucket: bucketName,
ObjectLockEnabledForBucket: true,
})
}).promise()
.then(() => bucketName);
}
@ -48,7 +52,7 @@ class BucketUtility {
deleteOne(bucketName) {
return this.s3
.deleteBucketPromise({ Bucket: bucketName });
.deleteBucket({ Bucket: bucketName }).promise();
}
deleteMany(bucketNames) {
@ -71,39 +75,39 @@ class BucketUtility {
};
return this.s3
.listObjectVersionsPromise(param)
.listObjectVersions(param).promise()
.then(data =>
Promise.all(
data.Versions
.filter(object => !object.Key.endsWith('/'))
// remove all objects
.map(object =>
this.s3.deleteObjectPromise({
this.s3.deleteObject({
Bucket: bucketName,
Key: object.Key,
VersionId: object.VersionId,
})
}).promise()
.then(() => object)
)
.concat(data.Versions
.filter(object => object.Key.endsWith('/'))
// remove all directories
.map(object =>
this.s3.deleteObjectPromise({
this.s3.deleteObject({
Bucket: bucketName,
Key: object.Key,
VersionId: object.VersionId,
})
}).promise()
.then(() => object)
)
)
.concat(data.DeleteMarkers
.map(object =>
this.s3.deleteObjectPromise({
this.s3.deleteObject({
Bucket: bucketName,
Key: object.Key,
VersionId: object.VersionId,
})
}).promise()
.then(() => object)))
)
);
@ -111,7 +115,7 @@ class BucketUtility {
getOwner() {
return this.s3
.listBucketsPromise()
.listBuckets().promise()
.then(data => data.Owner);
}
}
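This file is the heart of the test-suite migration: the bluebird `promisifyAll` wrapper (which generated the `*Promise` methods) is dropped in favour of the AWS SDK's native `.promise()` request API, with bluebird registered through `setPromisesDependency` and retries disabled. A short sketch of the before/after calling pattern; the bucket name and region are placeholders:

```js
const bluebird = require('bluebird');
const { S3 } = require('aws-sdk');

const s3 = new S3({ region: 'us-east-1' });     // placeholder config
// Make .promise() return bluebird promises (Promise.mapSeries etc. keep working)
s3.config.setPromisesDependency(bluebird);
s3.config.update({ maxRetries: 0 });            // fail fast in tests

// Before (promisifyAll with suffix 'Promise'):
//     s3.createBucketPromise({ Bucket: 'my-bucket' })
// After (native AWS.Request#promise):
s3.createBucket({ Bucket: 'my-bucket' }).promise()
    .then(() => s3.putObject({ Bucket: 'my-bucket', Key: 'k', Body: 'v' }).promise())
    .then(() => s3.deleteObject({ Bucket: 'my-bucket', Key: 'k' }).promise())
    .then(() => s3.deleteBucket({ Bucket: 'my-bucket' }).promise())
    .catch(err => process.stdout.write(`Error in sketch: ${err}\n`));
```

The remaining test changes in this compare are the mechanical form of this rewrite: `s3.fooPromise(params)` becomes `s3.foo(params).promise()`.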

View File

@ -37,7 +37,7 @@ describe('DELETE bucket cors', () => {
});
describe('with existing bucket', () => {
beforeEach(() => s3.createBucketPromise({ Bucket: bucketName }));
beforeEach(() => s3.createBucket({ Bucket: bucketName }).promise());
afterEach(() => bucketUtil.deleteOne(bucketName));
describe('without existing cors configuration', () => {

View File

@ -25,7 +25,7 @@ describe('DELETE bucket website', () => {
});
describe('with existing bucket', () => {
beforeEach(() => s3.createBucketPromise({ Bucket: bucketName }));
beforeEach(() => s3.createBucket({ Bucket: bucketName }).promise());
afterEach(() => bucketUtil.deleteOne(bucketName));
describe('without existing configuration', () => {

View File

@ -348,9 +348,9 @@ describe('GET Bucket - AWS.S3.listObjects', () => {
Promise
.mapSeries(test.objectPutParams(Bucket),
param => s3.putObjectPromise(param))
param => s3.putObject(param).promise())
.then(() =>
s3.listObjectsPromise(test.listObjectParams(Bucket)))
s3.listObjects(test.listObjectParams(Bucket)).promise())
.then(data => {
const isValidResponse =
tv4.validate(data, bucketSchema);
@ -373,9 +373,9 @@ describe('GET Bucket - AWS.S3.listObjects', () => {
Promise
.mapSeries(test.objectPutParams(Bucket),
param => s3.putObjectPromise(param))
param => s3.putObject(param).promise())
.then(() =>
s3.listObjectsV2Promise(test.listObjectParams(Bucket)))
s3.listObjectsV2(test.listObjectParams(Bucket)).promise())
.then(data => {
const isValidResponse =
tv4.validate(data, bucketSchemaV2);
@ -398,8 +398,8 @@ describe('GET Bucket - AWS.S3.listObjects', () => {
const objects = [{ Bucket, Key: k }];
Promise
.mapSeries(objects, param => s3.putObjectPromise(param))
.then(() => s3.listObjectsPromise({ Bucket, Prefix: k }))
.mapSeries(objects, param => s3.putObject(param).promise())
.then(() => s3.listObjects({ Bucket, Prefix: k }).promise())
.then(data => {
const isValidResponse = tv4.validate(data,
bucketSchema);
@ -422,8 +422,8 @@ describe('GET Bucket - AWS.S3.listObjects', () => {
const objects = [{ Bucket, Key: k }];
Promise
.mapSeries(objects, param => s3.putObjectPromise(param))
.then(() => s3.listObjectsPromise({ Bucket, Marker: k }))
.mapSeries(objects, param => s3.putObject(param).promise())
.then(() => s3.listObjects({ Bucket, Marker: k }).promise())
.then(data => {
const isValidResponse = tv4.validate(data,
bucketSchema);
@ -446,9 +446,9 @@ describe('GET Bucket - AWS.S3.listObjects', () => {
const objects = [{ Bucket, Key: k }, { Bucket, Key: 'zzz' }];
Promise
.mapSeries(objects, param => s3.putObjectPromise(param))
.then(() => s3.listObjectsPromise({ Bucket, MaxKeys: 1,
Delimiter: 'foo' }))
.mapSeries(objects, param => s3.putObject(param).promise())
.then(() => s3.listObjects({ Bucket, MaxKeys: 1,
Delimiter: 'foo' }).promise())
.then(data => {
const isValidResponse = tv4.validate(data,
bucketSchema);
@ -471,9 +471,9 @@ describe('GET Bucket - AWS.S3.listObjects', () => {
const objects = [{ Bucket, Key: k }];
Promise
.mapSeries(objects, param => s3.putObjectPromise(param))
.then(() => s3.listObjectsV2Promise(
{ Bucket, StartAfter: k }))
.mapSeries(objects, param => s3.putObject(param).promise())
.then(() => s3.listObjectsV2(
{ Bucket, StartAfter: k }).promise())
.then(data => {
const isValidResponse = tv4.validate(data,
bucketSchemaV2);
@ -497,9 +497,9 @@ describe('GET Bucket - AWS.S3.listObjects', () => {
const objects = [{ Bucket, Key: k }];
Promise
.mapSeries(objects, param => s3.putObjectPromise(param))
.then(() => s3.listObjectsV2Promise(
{ Bucket, ContinuationToken: generateToken(k) }))
.mapSeries(objects, param => s3.putObject(param).promise())
.then(() => s3.listObjectsV2(
{ Bucket, ContinuationToken: generateToken(k) }).promise())
.then(data => {
const isValidResponse = tv4.validate(data,
bucketSchemaV2);
@ -523,9 +523,9 @@ describe('GET Bucket - AWS.S3.listObjects', () => {
const Bucket = bucketName;
const objects = [{ Bucket, Key: k }, { Bucket, Key: 'zzz' }];
Promise
.mapSeries(objects, param => s3.putObjectPromise(param))
.then(() => s3.listObjectsV2Promise({ Bucket, MaxKeys: 1,
Delimiter: 'foo' }))
.mapSeries(objects, param => s3.putObject(param).promise())
.then(() => s3.listObjectsV2({ Bucket, MaxKeys: 1,
Delimiter: 'foo' }).promise())
.then(data => {
const isValidResponse = tv4.validate(data,
bucketSchemaV2);

View File

@ -26,11 +26,11 @@ describe('GET bucket cors', () => {
MaxAgeSeconds: 3000 },
] };
before(() =>
s3.createBucketPromise({ Bucket: bucketName })
.then(() => s3.putBucketCorsPromise({
s3.createBucket({ Bucket: bucketName }).promise()
.then(() => s3.putBucketCors({
Bucket: bucketName,
CORSConfiguration: sampleCors,
})));
}).promise()));
it('should return cors configuration successfully', done => {
s3.getBucketCors({ Bucket: bucketName }, (err, data) => {
@ -51,11 +51,11 @@ describe('GET bucket cors', () => {
AllowedHeaders: [testValue] },
] };
before(() =>
s3.createBucketPromise({ Bucket: bucketName })
.then(() => s3.putBucketCorsPromise({
s3.createBucket({ Bucket: bucketName }).promise()
.then(() => s3.putBucketCors({
Bucket: bucketName,
CORSConfiguration: sampleCors,
})));
}).promise()));
it('should be preserved when putting / getting cors resource',
done => {
@ -75,11 +75,11 @@ describe('GET bucket cors', () => {
AllowedOrigins: ['http://www.example.com'] },
] };
before(() =>
s3.createBucketPromise({ Bucket: bucketName })
.then(() => s3.putBucketCorsPromise({
s3.createBucket({ Bucket: bucketName }).promise()
.then(() => s3.putBucketCors({
Bucket: bucketName,
CORSConfiguration: sampleCors,
})));
}).promise()));
it('should be preserved when retrieving cors resource',
done => {

View File

@ -23,13 +23,13 @@ describeSkipAWS('GET bucket location ', () => {
return;
}
describe(`with location: ${location}`, () => {
before(done => s3.createBucketPromise(
before(() => s3.createBucket(
{
Bucket: bucketName,
CreateBucketConfiguration: {
LocationConstraint: location,
},
}, done));
}).promise());
after(() => bucketUtil.deleteOne(bucketName));
it(`should return location configuration: ${location} ` +
@ -48,13 +48,13 @@ describeSkipAWS('GET bucket location ', () => {
});
describe('with location us-east-1', () => {
before(done => s3.createBucketPromise(
before(() => s3.createBucket(
{
Bucket: bucketName,
CreateBucketConfiguration: {
LocationConstraint: 'us-east-1',
},
}, done));
}).promise());
afterEach(() => bucketUtil.deleteOne(bucketName));
it('should return empty location',
done => {
@ -105,13 +105,13 @@ describeSkipAWS('GET bucket location ', () => {
});
describe('with location configuration', () => {
before(done => s3.createBucketPromise(
before(() => s3.createBucket(
{
Bucket: bucketName,
CreateBucketConfiguration: {
LocationConstraint: 'us-east-1',
},
}, done));
}).promise());
after(() => bucketUtil.deleteOne(bucketName));
it('should return AccessDenied if user is not bucket owner',

View File

@ -31,11 +31,11 @@ describe('GET bucket website', () => {
describe('with existing bucket configuration', () => {
before(() =>
s3.createBucketPromise({ Bucket: bucketName })
.then(() => s3.putBucketWebsitePromise({
s3.createBucket({ Bucket: bucketName }).promise()
.then(() => s3.putBucketWebsite({
Bucket: bucketName,
WebsiteConfiguration: config,
})));
}).promise()));
it('should return bucket website xml successfully', done => {
s3.getBucketWebsite({ Bucket: bucketName }, (err, data) => {

View File

@ -258,7 +258,7 @@ describe('PUT Bucket - AWS.S3.createBucket', () => {
() => {
after(() => bucketUtil.deleteOne(bucketName));
it(`should create bucket with location: ${location}`, done => {
bucketUtil.s3.createBucketPromise(
bucketUtil.s3.createBucket(
{
Bucket: bucketName,
CreateBucketConfiguration: {
@ -271,7 +271,7 @@ describe('PUT Bucket - AWS.S3.createBucket', () => {
describe('bucket creation with invalid location', () => {
it('should return errors InvalidLocationConstraint', done => {
bucketUtil.s3.createBucketPromise(
bucketUtil.s3.createBucket(
{
Bucket: bucketName,
CreateBucketConfiguration: {

View File

@ -1,3 +1,4 @@
--recursive
--timeout 40000
--ui tdd
--bail

View File

@ -149,11 +149,11 @@ function testSuite() {
process.stdout.write('Creating bucket');
bucketUtil = new BucketUtility('default', sigCfg);
s3 = bucketUtil.s3;
return s3.createBucketPromise({ Bucket: bucket,
return s3.createBucket({ Bucket: bucket,
CreateBucketConfiguration: {
LocationConstraint: awsLocation,
},
})
}).promise()
.catch(err => {
process.stdout.write(`Error creating bucket: ${err}\n`);
throw err;

View File

@ -38,39 +38,39 @@ describeSkipIfNotMultiple('Multiple backend delete', () => {
process.stdout.write('Putting object to mem\n');
const params = { Bucket: bucket, Key: memObject, Body: body,
Metadata: { 'scal-location-constraint': memLocation } };
return s3.putObjectPromise(params);
return s3.putObject(params).promise();
})
.then(() => {
process.stdout.write('Putting object to file\n');
const params = { Bucket: bucket, Key: fileObject, Body: body,
Metadata: { 'scal-location-constraint': fileLocation } };
return s3.putObjectPromise(params);
return s3.putObject(params).promise();
})
.then(() => {
process.stdout.write('Putting object to AWS\n');
const params = { Bucket: bucket, Key: awsObject, Body: body,
Metadata: { 'scal-location-constraint': awsLocation } };
return s3.putObjectPromise(params);
return s3.putObject(params).promise();
})
.then(() => {
process.stdout.write('Putting 0-byte object to AWS\n');
const params = { Bucket: bucket, Key: emptyObject,
Metadata: { 'scal-location-constraint': awsLocation } };
return s3.putObjectPromise(params);
return s3.putObject(params).promise();
})
.then(() => {
process.stdout.write('Putting large object to AWS\n');
const params = { Bucket: bucket, Key: bigObject,
Body: bigBody,
Metadata: { 'scal-location-constraint': awsLocation } };
return s3.putObjectPromise(params);
return s3.putObject(params).promise();
})
.then(() => {
process.stdout.write('Putting object to AWS\n');
const params = { Bucket: bucket, Key: mismatchObject,
Body: body, Metadata:
{ 'scal-location-constraint': awsLocationMismatch } };
return s3.putObjectPromise(params);
return s3.putObject(params).promise();
})
.catch(err => {
process.stdout.write(`Error putting objects: ${err}\n`);

View File

@ -119,7 +119,7 @@ describeSkipIfNotMultiple('AWS backend delete object w. versioning: ' +
process.stdout.write('Creating bucket\n');
bucketUtil = new BucketUtility('default', sigCfg);
s3 = bucketUtil.s3;
return s3.createBucketPromise({ Bucket: bucket })
return s3.createBucket({ Bucket: bucket }).promise()
.catch(err => {
process.stdout.write(`Error creating bucket: ${err}\n`);
throw err;
@ -493,7 +493,7 @@ describeSkipIfNotMultiple('AWS backend delete object w. versioning: ' +
process.stdout.write('Creating bucket\n');
bucketUtil = new BucketUtility('default', sigCfg);
s3 = bucketUtil.s3;
return s3.createBucketPromise(createBucketParams)
return s3.createBucket(createBucketParams).promise()
.catch(err => {
process.stdout.write(`Error creating bucket: ${err}\n`);
throw err;

View File

@ -36,7 +36,7 @@ function testSuite() {
process.stdout.write('Creating bucket');
bucketUtil = new BucketUtility('default', sigCfg);
s3 = bucketUtil.s3;
return s3.createBucketPromise({ Bucket: azureContainerName })
return s3.createBucket({ Bucket: azureContainerName }).promise()
.catch(err => {
process.stdout.write(`Error creating bucket: ${err}\n`);
throw err;

View File

@ -34,7 +34,7 @@ describe('Multiple backend get object', function testSuite() {
process.stdout.write('Creating bucket');
bucketUtil = new BucketUtility('default', sigCfg);
s3 = bucketUtil.s3;
return s3.createBucketPromise({ Bucket: bucket })
return s3.createBucket({ Bucket: bucket }).promise()
.catch(err => {
process.stdout.write(`Error creating bucket: ${err}\n`);
throw err;
@ -183,46 +183,46 @@ describe('Multiple backend get object', function testSuite() {
'(mem/file/AWS)', () => {
before(() => {
process.stdout.write('Putting object to mem\n');
return s3.putObjectPromise({ Bucket: bucket, Key: memObject,
return s3.putObject({ Bucket: bucket, Key: memObject,
Body: body,
Metadata: { 'scal-location-constraint': memLocation } })
Metadata: { 'scal-location-constraint': memLocation } }).promise()
.then(() => {
process.stdout.write('Putting object to file\n');
return s3.putObjectPromise({ Bucket: bucket,
return s3.putObject({ Bucket: bucket,
Key: fileObject,
Body: body,
Metadata:
{ 'scal-location-constraint': fileLocation },
});
}).promise();
})
.then(() => {
process.stdout.write('Putting object to AWS\n');
return s3.putObjectPromise({ Bucket: bucket, Key: awsObject,
return s3.putObject({ Bucket: bucket, Key: awsObject,
Body: body,
Metadata: {
'scal-location-constraint': awsLocation } });
'scal-location-constraint': awsLocation } }).promise();
})
.then(() => {
process.stdout.write('Putting 0-byte object to mem\n');
return s3.putObjectPromise({ Bucket: bucket,
return s3.putObject({ Bucket: bucket,
Key: emptyObject,
Metadata:
{ 'scal-location-constraint': memLocation },
});
}).promise();
})
.then(() => {
process.stdout.write('Putting 0-byte object to AWS\n');
return s3.putObjectPromise({ Bucket: bucket,
return s3.putObject({ Bucket: bucket,
Key: emptyAwsObject,
Metadata: {
'scal-location-constraint': awsLocation } });
'scal-location-constraint': awsLocation } }).promise();
})
.then(() => {
process.stdout.write('Putting large object to AWS\n');
return s3.putObjectPromise({ Bucket: bucket,
return s3.putObject({ Bucket: bucket,
Key: bigObject, Body: bigBody,
Metadata: {
'scal-location-constraint': awsLocation } });
'scal-location-constraint': awsLocation } }).promise();
})
.catch(err => {
process.stdout.write(`Error putting objects: ${err}\n`);

View File

@ -46,7 +46,7 @@ function testSuite() {
process.stdout.write('Creating bucket');
bucketUtil = new BucketUtility('default', sigCfg);
s3 = bucketUtil.s3;
return s3.createBucketPromise({ Bucket: bucket })
return s3.createBucket({ Bucket: bucket }).promise()
.catch(err => {
process.stdout.write(`Error creating bucket: ${err}\n`);
throw err;

View File

@ -32,7 +32,7 @@ function testSuite() {
process.stdout.write('Creating bucket');
bucketUtil = new BucketUtility('default', sigCfg);
s3 = bucketUtil.s3;
return s3.createBucketPromise({ Bucket: azureContainerName })
return s3.createBucket({ Bucket: azureContainerName }).promise()
.catch(err => {
process.stdout.write(`Error creating bucket: ${err}\n`);
throw err;

View File

@ -20,20 +20,20 @@ describeSkipIfNotMultiple('List parts of MPU on Azure data backend', () => {
this.currentTest.key = `somekey-${Date.now()}`;
bucketUtil = new BucketUtility('default', sigCfg);
s3 = bucketUtil.s3;
return s3.createBucketPromise({ Bucket: azureContainerName })
.then(() => s3.createMultipartUploadPromise({
return s3.createBucket({ Bucket: azureContainerName }).promise()
.then(() => s3.createMultipartUpload({
Bucket: azureContainerName, Key: this.currentTest.key,
Metadata: { 'scal-location-constraint': azureLocation } }))
Metadata: { 'scal-location-constraint': azureLocation } }).promise())
.then(res => {
this.currentTest.uploadId = res.UploadId;
return s3.uploadPartPromise({ Bucket: azureContainerName,
return s3.uploadPart({ Bucket: azureContainerName,
Key: this.currentTest.key, PartNumber: 1,
UploadId: this.currentTest.uploadId, Body: bodyFirstPart });
UploadId: this.currentTest.uploadId, Body: bodyFirstPart }).promise();
}).then(res => {
this.currentTest.firstEtag = res.ETag;
}).then(() => s3.uploadPartPromise({ Bucket: azureContainerName,
}).then(() => s3.uploadPart({ Bucket: azureContainerName,
Key: this.currentTest.key, PartNumber: 2,
UploadId: this.currentTest.uploadId, Body: bodySecondPart })
UploadId: this.currentTest.uploadId, Body: bodySecondPart }).promise()
).then(res => {
this.currentTest.secondEtag = res.ETag;
})
@ -45,10 +45,10 @@ describeSkipIfNotMultiple('List parts of MPU on Azure data backend', () => {
afterEach(function afterEachFn() {
process.stdout.write('Emptying bucket');
return s3.abortMultipartUploadPromise({
return s3.abortMultipartUpload({
Bucket: azureContainerName, Key: this.currentTest.key,
UploadId: this.currentTest.uploadId,
})
}).promise()
.then(() => bucketUtil.empty(azureContainerName))
.then(() => {
process.stdout.write('Deleting bucket');

View File

@ -109,7 +109,7 @@ function testSuite() {
bucketUtil = new BucketUtility('default', sigCfg);
s3 = bucketUtil.s3;
this.currentTest.awsClient = awsS3;
return s3.createBucketPromise({ Bucket: azureContainerName })
return s3.createBucket({ Bucket: azureContainerName }).promise()
.catch(err => {
process.stdout.write(`Error creating bucket: ${err}\n`);
throw err;

View File

@ -121,6 +121,7 @@ function testSuite() {
bucketUtil = new BucketUtility('default', sigCfg);
s3 = bucketUtil.s3;
process.stdout.write('Creating bucket\n');
s3.createBucketPromise = Promise.promisify(s3.createBucket);
if (process.env.ENABLE_KMS_ENCRYPTION === 'true') {
s3.createBucketPromise = createEncryptedBucketPromise;
}
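This suite (and the similar hunks below) re-creates `s3.createBucketPromise` explicitly with `Promise.promisify`, because the name is still needed as a seam: when ENABLE_KMS_ENCRYPTION is set it is replaced by `createEncryptedBucketPromise`, and the rest of the suite calls the promisified name either way. A rough sketch of that toggle; the encrypted-bucket helper below is a hypothetical stand-in for the one the tests import:

```js
const Promise = require('bluebird');
const { S3 } = require('aws-sdk');

const s3 = new S3({ region: 'us-east-1' });  // placeholder config

// Hypothetical stand-in for the real createEncryptedBucketPromise helper;
// the real one also enables server-side encryption on the new bucket.
const createEncryptedBucketPromise = params =>
    s3.createBucket(params).promise();

// Default: promisify the plain SDK call. Bluebird forwards the `this`
// binding at call time, so s3.createBucketPromise(params) still runs
// with s3 as its receiver.
s3.createBucketPromise = Promise.promisify(s3.createBucket);

if (process.env.ENABLE_KMS_ENCRYPTION === 'true') {
    s3.createBucketPromise = createEncryptedBucketPromise;
}

// Either implementation is used the same way by the suite:
//     return s3.createBucketPromise({ Bucket: bucket }).then(...)
```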

View File

@ -113,6 +113,7 @@ function testSuite() {
bucketUtil = new BucketUtility('default', sigCfg);
s3 = bucketUtil.s3;
process.stdout.write('Creating bucket\n');
s3.createBucketPromise = Promise.promisify(s3.createBucket);
if (process.env.ENABLE_KMS_ENCRYPTION === 'true') {
s3.createBucketPromise = createEncryptedBucketPromise;
}

View File

@ -167,7 +167,7 @@ function testSuite() {
beforeEach(() => {
bucketUtil = new BucketUtility('default', sigCfg);
s3 = bucketUtil.s3;
return s3.createBucketPromise({ Bucket: bucket })
return s3.createBucket({ Bucket: bucket }).promise()
.catch(err => {
process.stdout.write(`Error creating bucket: ${err}\n`);
throw err;

View File

@ -79,6 +79,7 @@ describe('MultipleBackend put object', function testSuite() {
bucketUtil = new BucketUtility('default', sigCfg);
s3 = bucketUtil.s3;
process.stdout.write('Creating bucket\n');
s3.createBucketPromise = Promise.promisify(s3.createBucket);
if (process.env.ENABLE_KMS_ENCRYPTION === 'true') {
s3.createBucketPromise = createEncryptedBucketPromise;
}

View File

@ -108,7 +108,7 @@ describeF() {
describe('with no bucket location header', () => {
beforeEach(() =>
s3.createBucketPromise({ Bucket: azureContainerName })
s3.createBucket({ Bucket: azureContainerName }).promise()
.catch(err => {
process.stdout.write(`Error creating bucket: ${err}\n`);
throw err;

View File

@ -19,6 +19,7 @@ const describeSkipIfE2E = process.env.S3_END_TO_END ? describe.skip : describe;
class ContinueRequestHandler {
constructor(path) {
this.path = path;
this.expectHeader = '100-continue';
return this;
}
@ -40,7 +41,7 @@ class ContinueRequestHandler {
method: 'PUT',
headers: {
'content-length': body.length,
'Expect': this.expectHeader || '100-continue',
'Expect': this.expectHeader,
},
};
}
@ -49,11 +50,15 @@ class ContinueRequestHandler {
const options = this.getRequestOptions();
process.env.NODE_TLS_REJECT_UNAUTHORIZED = '0';
const req = transport.request(options, res => {
assert.strictEqual(res.statusCode, statusCode);
return cb();
res.on('data', () => {});
res.on('end', () => {
assert.strictEqual(res.statusCode, statusCode);
return cb();
});
});
// Send the body either on the continue event, or immediately.
if (this.expectHeader === '100-continue') {
req.flushHeaders();
req.on('continue', () => req.end(body));
} else {
req.end(body);
@ -64,20 +69,44 @@ class ContinueRequestHandler {
const options = this.getRequestOptions();
process.env.NODE_TLS_REJECT_UNAUTHORIZED = '0';
const req = transport.request(options);
req.flushHeaders();
// At this point we have only sent the header.
assert(req.output.length === 1);
const headerLen = req.output[0].length;
const headerLen = req._header.length;
req.on('continue', () => {
// Has only the header been sent?
assert.strictEqual(req.socket.bytesWritten, headerLen);
// Send the body since the continue event has been emitted.
return req.end(body);
});
req.on('close', () => {
const expected = body.length + headerLen;
// Has the entire body been sent?
assert.strictEqual(req.socket.bytesWritten, expected);
return cb();
req.on('response', res => {
res.on('data', () => {});
res.on('end', () => {
const expected = body.length + headerLen;
// Has the entire body been sent?
assert.strictEqual(req.socket.bytesWritten, expected);
return cb();
});
res.on('error', err => cb(err));
});
req.on('error', err => cb(err));
}
shouldNotGetContinue(cb) {
const options = this.getRequestOptions();
process.env.NODE_TLS_REJECT_UNAUTHORIZED = '0';
const req = transport.request(options);
req.flushHeaders();
// At this point we have only sent the header.
const headerLen = req._header.length;
req.on('continue', () => cb('Continue being seen when 403 is expected'));
req.on('response', res => {
res.on('data', () => {});
res.on('end', () => {
const expected = headerLen;
assert.strictEqual(req.socket.bytesWritten, expected);
return cb();
});
res.on('error', err => cb(err));
});
req.on('error', err => cb(err));
}
@ -100,7 +129,7 @@ describeSkipIfE2E('PUT public object with 100-continue header', () => {
const signedUrl = s3.getSignedUrl('putObject', params);
const { path } = url.parse(signedUrl);
continueRequest = new ContinueRequestHandler(path);
return s3.createBucketPromise({ Bucket: bucket });
return s3.createBucket({ Bucket: bucket }).promise();
});
afterEach(() =>
@ -125,8 +154,8 @@ describeSkipIfE2E('PUT public object with 100-continue header', () => {
it('should wait for continue event before sending body', done =>
continueRequest.sendsBodyOnContinue(done));
it('should continue if a public user', done =>
it('should not send continue if denied for a public user', done =>
continueRequest.setRequestPath(invalidSignedURL)
.sendsBodyOnContinue(done));
.shouldNotGetContinue(done));
});
});
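The reworked ContinueRequestHandler drives the Expect: 100-continue handshake by hand: it flushes only the headers, takes their length from `req._header`, sends the body only when the `continue` event fires, and then compares `req.socket.bytesWritten` against the header and body lengths once the response has ended. A stripped-down sketch of that client-side flow; host, port, path and body are placeholders:

```js
const http = require('http');
const assert = require('assert');

const body = Buffer.from('hello');

const req = http.request({
    hostname: 'localhost',
    port: 8000,                        // placeholder test endpoint
    path: '/bucket/key',
    method: 'PUT',
    headers: {
        'content-length': body.length,
        'Expect': '100-continue',
    },
});

// Send only the headers for now; req._header is the serialized header block.
req.flushHeaders();
const headerLen = req._header.length;

req.on('continue', () => {
    // Nothing but the headers should have hit the socket before 100 Continue.
    assert.strictEqual(req.socket.bytesWritten, headerLen);
    req.end(body);
});

req.on('response', res => {
    res.on('data', () => {});
    res.on('end', () => {
        // After the response completes, headers + body must both be on the wire.
        assert.strictEqual(req.socket.bytesWritten, headerLen + body.length);
        process.stdout.write(`status: ${res.statusCode}\n`);
    });
});

req.on('error', err => process.stdout.write(`request error: ${err}\n`));
```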

View File

@ -23,13 +23,13 @@ describe('Abort MPU', () => {
beforeEach(() => {
bucketUtil = new BucketUtility('default', sigCfg);
s3 = bucketUtil.s3;
return s3.createBucketPromise({ Bucket: bucket })
.then(() => s3.createMultipartUploadPromise({
Bucket: bucket, Key: key }))
return s3.createBucket({ Bucket: bucket }).promise()
.then(() => s3.createMultipartUpload({
Bucket: bucket, Key: key }).promise())
.then(res => {
uploadId = res.UploadId;
return s3.uploadPartPromise({ Bucket: bucket, Key: key,
PartNumber: 1, UploadId: uploadId, Body: bodyFirstPart });
return s3.uploadPart({ Bucket: bucket, Key: key,
PartNumber: 1, UploadId: uploadId, Body: bodyFirstPart }).promise();
})
.catch(err => {
process.stdout.write(`Error in beforeEach: ${err}\n`);
@ -38,11 +38,11 @@ describe('Abort MPU', () => {
});
afterEach(() =>
s3.abortMultipartUploadPromise({
s3.abortMultipartUpload({
Bucket: bucket,
Key: key,
UploadId: uploadId,
})
}).promise()
.then(() => bucketUtil.empty(bucket))
.then(() => bucketUtil.deleteOne(bucket))
);
@ -71,7 +71,7 @@ describe('Abort MPU - No Such Upload', () => {
beforeEach(() => {
bucketUtil = new BucketUtility('default', sigCfg);
s3 = bucketUtil.s3;
return s3.createBucketPromise({ Bucket: bucket });
return s3.createBucket({ Bucket: bucket }).promise();
});
afterEach(() => bucketUtil.deleteOne(bucket));

View File

@ -22,7 +22,7 @@ describe('Tag condition keys updateRequestContext', () => {
beforeEach(() => {
bucketUtil = new BucketUtility('default', sigCfg);
s3 = bucketUtil.s3;
return s3.createBucketPromise({ Bucket: bucket })
return s3.createBucket({ Bucket: bucket }).promise()
.catch(err => {
process.stdout.write(`Error creating bucket: ${err}\n`);
throw err;
@ -39,7 +39,7 @@ describe('Tag condition keys updateRequestContext', () => {
it('should update request contexts with request tags and existing object tags', done => {
const tagsToExist = 'oneKey=oneValue&twoKey=twoValue';
const params = { Bucket: bucket, Key: object, Tagging: tagsToExist };
s3.putObject(params, err => {
return s3.putObject(params, err => {
assert.ifError(err);
updateRequestContexts(objPutTaggingReq, requestContexts, 'objectPutTagging', log,
(err, newRequestContexts) => {

View File

@ -56,12 +56,17 @@ describe('Complete MPU', () => {
function _initiateMpuAndPutOnePart() {
const result = {};
return s3.createMultipartUploadPromise({
Bucket: bucket, Key: key })
return s3.createMultipartUpload({
Bucket: bucket, Key: key }).promise()
.then(data => {
result.uploadId = data.UploadId;
return s3.uploadPartPromise({ Bucket: bucket, Key: key,
PartNumber: 1, UploadId: data.UploadId, Body: 'foo' });
return s3.uploadPart({
Bucket: bucket,
Key: key,
PartNumber: 1,
UploadId: data.UploadId,
Body: 'foo',
}).promise();
})
.then(data => {
result.eTag = data.ETag;
@ -107,8 +112,8 @@ describe('Complete MPU', () => {
let uploadId;
let eTag;
beforeEach(() => s3.putBucketVersioningPromise({ Bucket: bucket,
VersioningConfiguration: versioningEnabled })
beforeEach(() => s3.putBucketVersioning({ Bucket: bucket,
VersioningConfiguration: versioningEnabled }).promise()
.then(() => _initiateMpuAndPutOnePart())
.then(result => {
uploadId = result.uploadId;
@ -126,8 +131,8 @@ describe('Complete MPU', () => {
let uploadId;
let eTag;
beforeEach(() => s3.putBucketVersioningPromise({ Bucket: bucket,
VersioningConfiguration: versioningSuspended })
beforeEach(() => s3.putBucketVersioning({ Bucket: bucket,
VersioningConfiguration: versioningSuspended }).promise()
.then(() => _initiateMpuAndPutOnePart())
.then(result => {
uploadId = result.uploadId;

View File

@ -40,6 +40,7 @@ describe('Object Part Copy', () => {
beforeEach(() => {
bucketUtil = new BucketUtility('default', sigCfg);
s3 = bucketUtil.s3;
s3.createBucketPromise = Promise.promisify(s3.createBucket);
if (process.env.ENABLE_KMS_ENCRYPTION === 'true') {
s3.createBucketPromise = createEncryptedBucketPromise;
}
@ -54,22 +55,22 @@ describe('Object Part Copy', () => {
throw err;
})
.then(() =>
s3.putObjectPromise({
s3.putObject({
Bucket: sourceBucketName,
Key: sourceObjName,
Body: content,
}))
}).promise())
.then(res => {
etag = res.ETag;
return s3.headObjectPromise({
return s3.headObject({
Bucket: sourceBucketName,
Key: sourceObjName,
});
}).promise();
}).then(() =>
s3.createMultipartUploadPromise({
s3.createMultipartUpload({
Bucket: destBucketName,
Key: destObjName,
})).then(iniateRes => {
}).promise()).then(iniateRes => {
uploadId = iniateRes.UploadId;
}).catch(err => {
process.stdout.write(`Error in outer beforeEach: ${err}\n`);
@ -79,11 +80,11 @@ describe('Object Part Copy', () => {
afterEach(() => bucketUtil.empty(sourceBucketName)
.then(() => bucketUtil.empty(destBucketName))
.then(() => s3.abortMultipartUploadPromise({
.then(() => s3.abortMultipartUpload({
Bucket: destBucketName,
Key: destObjName,
UploadId: uploadId,
}))
}).promise())
.catch(err => {
if (err.code !== 'NoSuchUpload') {
process.stdout.write(`Error in afterEach: ${err}\n`);
@ -335,10 +336,10 @@ describe('Object Part Copy', () => {
const otherPartBuff = Buffer.alloc(5242880, 1);
otherMd5HashPart.update(otherPartBuff);
const otherPartHash = otherMd5HashPart.digest('hex');
return s3.createMultipartUploadPromise({
return s3.createMultipartUpload({
Bucket: sourceBucketName,
Key: sourceMpuKey,
}).then(iniateRes => {
}).promise().then(iniateRes => {
sourceMpuId = iniateRes.UploadId;
}).catch(err => {
process.stdout.write(`Error initiating MPU ' +
@ -349,13 +350,13 @@ describe('Object Part Copy', () => {
for (let i = 1; i < 10; i++) {
const partBuffHere = i % 2 ? partBuff : otherPartBuff;
const partHashHere = i % 2 ? partHash : otherPartHash;
partUploads.push(s3.uploadPartPromise({
partUploads.push(s3.uploadPart({
Bucket: sourceBucketName,
Key: sourceMpuKey,
PartNumber: i,
UploadId: sourceMpuId,
Body: partBuffHere,
}));
}).promise());
parts.push({
ETag: partHashHere,
PartNumber: i,
@ -369,14 +370,14 @@ describe('Object Part Copy', () => {
throw err;
}).then(() => {
process.stdout.write('completing mpu');
return s3.completeMultipartUploadPromise({
return s3.completeMultipartUpload({
Bucket: sourceBucketName,
Key: sourceMpuKey,
UploadId: sourceMpuId,
MultipartUpload: {
Parts: parts,
},
});
}).promise();
}).then(() => {
process.stdout.write('finished completing mpu');
}).catch(err => {
@ -385,11 +386,11 @@ describe('Object Part Copy', () => {
});
});
afterEach(() => s3.abortMultipartUploadPromise({
afterEach(() => s3.abortMultipartUpload({
Bucket: sourceBucketName,
Key: sourceMpuKey,
UploadId: sourceMpuId,
}).catch(err => {
}).promise().catch(err => {
if (err.code !== 'NoSuchUpload'
&& err.code !== 'NoSuchBucket') {
process.stdout.write(`Error in afterEach: ${err}\n`);
@ -418,27 +419,27 @@ describe('Object Part Copy', () => {
it('should copy two parts from a source bucket to a different ' +
'destination bucket and complete the MPU', () => {
process.stdout.write('Putting first part in MPU test');
return s3.uploadPartCopyPromise({ Bucket: destBucketName,
return s3.uploadPartCopy({ Bucket: destBucketName,
Key: destObjName,
CopySource: `${sourceBucketName}/${sourceMpuKey}`,
PartNumber: 1,
UploadId: uploadId,
}).then(res => {
}).promise().then(res => {
assert.strictEqual(res.ETag, totalMpuObjectHash);
assert(res.LastModified);
}).then(() => {
process.stdout.write('Putting second part in MPU test');
return s3.uploadPartCopyPromise({ Bucket: destBucketName,
return s3.uploadPartCopy({ Bucket: destBucketName,
Key: destObjName,
CopySource: `${sourceBucketName}/${sourceMpuKey}`,
PartNumber: 2,
UploadId: uploadId,
}).then(res => {
}).promise().then(res => {
assert.strictEqual(res.ETag, totalMpuObjectHash);
assert(res.LastModified);
}).then(() => {
process.stdout.write('Completing MPU');
return s3.completeMultipartUploadPromise({
return s3.completeMultipartUpload({
Bucket: destBucketName,
Key: destObjName,
UploadId: uploadId,
@ -448,7 +449,7 @@ describe('Object Part Copy', () => {
{ ETag: totalMpuObjectHash, PartNumber: 2 },
],
},
});
}).promise();
}).then(res => {
assert.strictEqual(res.Bucket, destBucketName);
assert.strictEqual(res.Key, destObjName);
@ -472,29 +473,29 @@ describe('Object Part Copy', () => {
// with number of parts at the end)
const finalCombinedETag =
'"e08ede4e8b942e18537cb2289f613ae3-2"';
return s3.uploadPartCopyPromise({ Bucket: destBucketName,
return s3.uploadPartCopy({ Bucket: destBucketName,
Key: destObjName,
CopySource: `${sourceBucketName}/${sourceMpuKey}`,
PartNumber: 1,
UploadId: uploadId,
CopySourceRange: 'bytes=5242890-15242880',
}).then(res => {
}).promise().then(res => {
assert.strictEqual(res.ETag, part1ETag);
assert(res.LastModified);
}).then(() => {
process.stdout.write('Putting second part in MPU test');
return s3.uploadPartCopyPromise({ Bucket: destBucketName,
return s3.uploadPartCopy({ Bucket: destBucketName,
Key: destObjName,
CopySource: `${sourceBucketName}/${sourceMpuKey}`,
PartNumber: 2,
UploadId: uploadId,
CopySourceRange: 'bytes=15242891-30242991',
}).then(res => {
}).promise().then(res => {
assert.strictEqual(res.ETag, part2ETag);
assert(res.LastModified);
}).then(() => {
process.stdout.write('Completing MPU');
return s3.completeMultipartUploadPromise({
return s3.completeMultipartUpload({
Bucket: destBucketName,
Key: destObjName,
UploadId: uploadId,
@ -504,17 +505,17 @@ describe('Object Part Copy', () => {
{ ETag: part2ETag, PartNumber: 2 },
],
},
});
}).promise();
}).then(res => {
assert.strictEqual(res.Bucket, destBucketName);
assert.strictEqual(res.Key, destObjName);
assert.strictEqual(res.ETag, finalCombinedETag);
}).then(() => {
process.stdout.write('Getting new object');
return s3.getObjectPromise({
return s3.getObject({
Bucket: destBucketName,
Key: destObjName,
});
}).promise();
}).then(res => {
assert.strictEqual(res.ContentLength, 25000092);
assert.strictEqual(res.ETag, finalCombinedETag);
@ -529,27 +530,27 @@ describe('Object Part Copy', () => {
// AWS response etag for this completed MPU
const finalObjETag = '"db77ebbae9e9f5a244a26b86193ad818-1"';
process.stdout.write('Putting first part in MPU test');
return s3.uploadPartCopyPromise({ Bucket: destBucketName,
return s3.uploadPartCopy({ Bucket: destBucketName,
Key: destObjName,
CopySource: `${sourceBucketName}/${sourceMpuKey}`,
PartNumber: 1,
UploadId: uploadId,
}).then(res => {
}).promise().then(res => {
assert.strictEqual(res.ETag, totalMpuObjectHash);
assert(res.LastModified);
}).then(() => {
process.stdout.write('Overwriting first part in MPU test');
return s3.uploadPartCopyPromise({ Bucket: destBucketName,
return s3.uploadPartCopy({ Bucket: destBucketName,
Key: destObjName,
CopySource: `${sourceBucketName}/${sourceObjName}`,
PartNumber: 1,
UploadId: uploadId });
UploadId: uploadId }).promise();
}).then(res => {
assert.strictEqual(res.ETag, etag);
assert(res.LastModified);
}).then(() => {
process.stdout.write('Completing MPU');
return s3.completeMultipartUploadPromise({
return s3.completeMultipartUpload({
Bucket: destBucketName,
Key: destObjName,
UploadId: uploadId,
@ -558,7 +559,7 @@ describe('Object Part Copy', () => {
{ ETag: etag, PartNumber: 1 },
],
},
});
}).promise();
}).then(res => {
assert.strictEqual(res.Bucket, destBucketName);
assert.strictEqual(res.Key, destObjName);
@ -566,10 +567,10 @@ describe('Object Part Copy', () => {
}).then(() => {
process.stdout.write('Getting object put by MPU with ' +
'overwrite part');
return s3.getObjectPromise({
return s3.getObject({
Bucket: destBucketName,
Key: destObjName,
});
}).promise();
}).then(res => {
assert.strictEqual(res.ETag, finalObjETag);
}).catch(err => {
@ -650,18 +651,18 @@ describe('Object Part Copy', () => {
beforeEach(() => {
process.stdout.write('In other account before each');
return otherAccountS3.createBucketPromise({ Bucket:
otherAccountBucket })
return otherAccountS3.createBucket({ Bucket:
otherAccountBucket }).promise()
.catch(err => {
process.stdout.write('Error creating other account ' +
`bucket: ${err}\n`);
throw err;
}).then(() => {
process.stdout.write('Initiating other account MPU');
return otherAccountS3.createMultipartUploadPromise({
return otherAccountS3.createMultipartUpload({
Bucket: otherAccountBucket,
Key: otherAccountKey,
});
}).promise();
}).then(iniateRes => {
otherAccountUploadId = iniateRes.UploadId;
}).catch(err => {
@ -672,11 +673,11 @@ describe('Object Part Copy', () => {
});
afterEach(() => otherAccountBucketUtility.empty(otherAccountBucket)
.then(() => otherAccountS3.abortMultipartUploadPromise({
.then(() => otherAccountS3.abortMultipartUpload({
Bucket: otherAccountBucket,
Key: otherAccountKey,
UploadId: otherAccountUploadId,
}))
}).promise())
.catch(err => {
if (err.code !== 'NoSuchUpload') {
process.stdout.write('Error in other account ' +

View File

@ -54,10 +54,10 @@ describe('DELETE multipart', () => {
`${confLocation.name}`,
() => {
beforeEach(() =>
s3.createBucketPromise({ Bucket: bucket,
s3.createBucket({ Bucket: bucket,
CreateBucketConfiguration: {
LocationConstraint: confLocation.location,
} })
} }).promise()
.catch(err => {
process.stdout.write(`Error in beforeEach: ${err}\n`);
throw err;
@ -89,10 +89,10 @@ describe('DELETE multipart', () => {
let uploadId;
beforeEach(() =>
s3.createMultipartUploadPromise({
s3.createMultipartUpload({
Bucket: bucket,
Key: key,
})
}).promise()
.then(res => {
uploadId = res.UploadId;
return s3.uploadPart({

View File

@ -19,11 +19,11 @@ describe('DELETE object', () => {
describe('with multipart upload', () => {
before(() => {
process.stdout.write('creating bucket\n');
return s3.createBucketPromise({ Bucket: bucketName })
return s3.createBucket({ Bucket: bucketName }).promise()
.then(() => {
process.stdout.write('initiating multipart upload\n');
return s3.createMultipartUploadPromise({ Bucket: bucketName,
Key: objectName });
return s3.createMultipartUpload({ Bucket: bucketName,
Key: objectName }).promise();
})
.then(res => {
process.stdout.write('uploading parts\n');
@ -31,13 +31,13 @@ describe('DELETE object', () => {
const uploads = [];
for (let i = 1; i <= 3; i++) {
uploads.push(
s3.uploadPartPromise({
s3.uploadPart({
Bucket: bucketName,
Key: objectName,
PartNumber: i,
Body: testfile,
UploadId: uploadId,
})
}).promise()
);
}
return Promise.all(uploads);
@ -49,7 +49,7 @@ describe('DELETE object', () => {
.then(res => {
process.stdout.write('about to complete multipart ' +
'upload\n');
return s3.completeMultipartUploadPromise({
return s3.completeMultipartUpload({
Bucket: bucketName,
Key: objectName,
UploadId: uploadId,
@ -60,7 +60,7 @@ describe('DELETE object', () => {
{ ETag: res[2].ETag, PartNumber: 3 },
],
},
});
}).promise();
})
.catch(err => {
process.stdout.write('completeMultipartUpload error: ' +
@ -99,20 +99,20 @@ describe('DELETE object', () => {
const retainDate = moment().add(10, 'days').toISOString();
before(() => {
process.stdout.write('creating bucket\n');
return s3.createBucketPromise({
return s3.createBucket({
Bucket: bucketName,
ObjectLockEnabledForBucket: true,
})
}).promise()
.catch(err => {
process.stdout.write(`Error creating bucket ${err}\n`);
throw err;
})
.then(() => {
process.stdout.write('putting object\n');
return s3.putObjectPromise({
return s3.putObject({
Bucket: bucketName,
Key: objectName,
});
}).promise();
})
.catch(err => {
process.stdout.write('Error putting object');
@ -121,14 +121,14 @@ describe('DELETE object', () => {
.then(res => {
versionIdOne = res.VersionId;
process.stdout.write('putting object retention\n');
return s3.putObjectRetentionPromise({
return s3.putObjectRetention({
Bucket: bucketName,
Key: objectName,
Retention: {
Mode: 'GOVERNANCE',
RetainUntilDate: retainDate,
},
});
}).promise();
})
.catch(err => {
process.stdout.write('Err putting object retention\n');
@ -136,10 +136,10 @@ describe('DELETE object', () => {
})
.then(() => {
process.stdout.write('putting object\n');
return s3.putObjectPromise({
return s3.putObject({
Bucket: bucketName,
Key: objectNameTwo,
});
}).promise();
})
.catch(err => {
process.stdout.write(('Err putting second object\n'));
@ -148,13 +148,13 @@ describe('DELETE object', () => {
.then(res => {
versionIdTwo = res.VersionId;
process.stdout.write('putting object legal hold\n');
return s3.putObjectLegalHoldPromise({
return s3.putObjectLegalHold({
Bucket: bucketName,
Key: objectNameTwo,
LegalHold: {
Status: 'ON',
},
});
}).promise();
})
.catch(err => {
process.stdout.write('Err putting object legal hold\n');

View File

@ -105,7 +105,6 @@ describe('GET object', () => {
UploadId: uploadId,
Body: Buffer.alloc(partSize).fill(partNumber),
};
return s3.uploadPart(uploadPartParams, (err, data) => {
checkNoError(err);
ETags = ETags.concat(data.ETag);
@ -1069,10 +1068,10 @@ describe('GET object with object lock', () => {
ObjectLockMode: mockMode,
ObjectLockLegalHoldStatus: 'ON',
};
return s3.createBucketPromise(
{ Bucket: bucket, ObjectLockEnabledForBucket: true })
.then(() => s3.putObjectPromise(params))
.then(() => s3.getObjectPromise({ Bucket: bucket, Key: key }))
return s3.createBucket(
{ Bucket: bucket, ObjectLockEnabledForBucket: true }).promise()
.then(() => s3.putObject(params).promise())
.then(() => s3.getObject({ Bucket: bucket, Key: key }).promise())
/* eslint-disable no-return-assign */
.then(res => versionId = res.VersionId)
.catch(err => {
@ -1082,7 +1081,7 @@ describe('GET object with object lock', () => {
});
afterEach(() => changeLockPromise([{ bucket, key, versionId }], '')
.then(() => s3.listObjectVersionsPromise({ Bucket: bucket }))
.then(() => s3.listObjectVersions({ Bucket: bucket }).promise())
.then(res => res.Versions.forEach(object => {
const params = [
{
@ -1097,7 +1096,7 @@ describe('GET object with object lock', () => {
process.stdout.write('Emptying and deleting buckets\n');
return bucketUtil.empty(bucket);
})
.then(() => s3.deleteBucketPromise({ Bucket: bucket }))
.then(() => s3.deleteBucket({ Bucket: bucket }).promise())
.catch(err => {
process.stdout.write('Error in afterEach');
throw err;

View File

@ -43,11 +43,11 @@ describe('GET multipart upload object [Cache-Control, Content-Disposition, ' +
})
.then(() => {
process.stdout.write('creating bucket\n');
return s3.createBucketPromise({ Bucket: bucketName });
return s3.createBucket({ Bucket: bucketName }).promise();
})
.then(() => {
process.stdout.write('initiating multipart upload\n');
return s3.createMultipartUploadPromise(params);
return s3.createMultipartUpload(params).promise();
})
.then(res => {
uploadId = res.UploadId;
@ -75,14 +75,14 @@ describe('GET multipart upload object [Cache-Control, Content-Disposition, ' +
() => {
const params = { Bucket: bucketName, Key: 'key', PartNumber: 1,
UploadId: uploadId };
return s3.uploadPartPromise(params)
return s3.uploadPart(params).promise()
.catch(err => {
process.stdout.write(`Error in uploadPart ${err}\n`);
throw err;
})
.then(res => {
process.stdout.write('about to complete multipart upload\n');
return s3.completeMultipartUploadPromise({
return s3.completeMultipartUpload({
Bucket: bucketName,
Key: objectName,
UploadId: uploadId,
@ -91,7 +91,7 @@ describe('GET multipart upload object [Cache-Control, Content-Disposition, ' +
{ ETag: res.ETag, PartNumber: 1 },
],
},
});
}).promise();
})
.catch(err => {
process.stdout.write(`Error completing upload ${err}\n`);
@ -99,9 +99,9 @@ describe('GET multipart upload object [Cache-Control, Content-Disposition, ' +
})
.then(() => {
process.stdout.write('about to get object\n');
return s3.getObjectPromise({
return s3.getObject({
Bucket: bucketName, Key: objectName,
});
}).promise();
})
.catch(err => {
process.stdout.write(`Error getting object ${err}\n`);

View File

@ -23,20 +23,20 @@ describe('GET object legal hold', () => {
beforeEach(() => {
process.stdout.write('Putting buckets and objects\n');
return s3.createBucketPromise(
{ Bucket: bucket, ObjectLockEnabledForBucket: true })
.then(() => s3.createBucketPromise({ Bucket: unlockedBucket }))
.then(() => s3.putObjectPromise({ Bucket: unlockedBucket, Key: key }))
.then(() => s3.putObjectPromise({ Bucket: bucket, Key: keyNoHold }))
.then(() => s3.putObjectPromise({ Bucket: bucket, Key: key }))
return s3.createBucket(
{ Bucket: bucket, ObjectLockEnabledForBucket: true }).promise()
.then(() => s3.createBucket({ Bucket: unlockedBucket }).promise())
.then(() => s3.putObject({ Bucket: unlockedBucket, Key: key }).promise())
.then(() => s3.putObject({ Bucket: bucket, Key: keyNoHold }).promise())
.then(() => s3.putObject({ Bucket: bucket, Key: key }).promise())
.then(res => {
versionId = res.VersionId;
process.stdout.write('Putting object legal hold\n');
return s3.putObjectLegalHoldPromise({
return s3.putObjectLegalHold({
Bucket: bucket,
Key: key,
LegalHold: { Status: 'ON' },
});
}).promise();
})
.catch(err => {
process.stdout.write('Error in beforeEach\n');

View File

@ -28,17 +28,17 @@ describe('aws-node-sdk range test of large end position', () => {
beforeEach(() => {
bucketUtil = new BucketUtility('default', sigCfg);
s3 = bucketUtil.s3;
return s3.createBucketPromise({ Bucket: bucketName })
return s3.createBucket({ Bucket: bucketName }).promise()
.catch(err => {
process.stdout.write(`Error creating bucket: ${err}\n`);
throw err;
}).then(() =>
s3.putObjectPromise({
s3.putObject({
Bucket: bucketName,
Key: objName,
Body: Buffer.allocUnsafe(2890).fill(0, 0, 2800)
.fill(1, 2800),
}))
}).promise())
.catch(err => {
process.stdout.write(`Error in beforeEach: ${err}\n`);
throw err;

View File

@ -43,20 +43,20 @@ describe('GET object retention', () => {
beforeEach(() => {
process.stdout.write('Putting buckets and objects\n');
return s3.createBucketPromise(
{ Bucket: bucketName, ObjectLockEnabledForBucket: true })
.then(() => s3.createBucketPromise({ Bucket: unlockedBucket }))
.then(() => s3.putObjectPromise({ Bucket: unlockedBucket, Key: objectName }))
.then(() => s3.putObjectPromise({ Bucket: bucketName, Key: noRetentionObject }))
.then(() => s3.putObjectPromise({ Bucket: bucketName, Key: objectName }))
return s3.createBucket(
{ Bucket: bucketName, ObjectLockEnabledForBucket: true }).promise()
.then(() => s3.createBucket({ Bucket: unlockedBucket }).promise())
.then(() => s3.putObject({ Bucket: unlockedBucket, Key: objectName }).promise())
.then(() => s3.putObject({ Bucket: bucketName, Key: noRetentionObject }).promise())
.then(() => s3.putObject({ Bucket: bucketName, Key: objectName }).promise())
.then(res => {
versionId = res.VersionId;
process.stdout.write('Putting object retention\n');
return s3.putObjectRetentionPromise({
return s3.putObjectRetention({
Bucket: bucketName,
Key: objectName,
Retention: retentionConfig,
});
}).promise();
})
.catch(err => {
process.stdout.write('Error in beforeEach\n');

View File

@ -18,7 +18,7 @@ describe('Initiate MPU', () => {
beforeEach(() => {
bucketUtil = new BucketUtility('default', sigCfg);
s3 = bucketUtil.s3;
return s3.createBucketPromise({ Bucket: bucket })
return s3.createBucket({ Bucket: bucket }).promise()
.catch(err => {
process.stdout.write(`Error creating bucket: ${err}\n`);
throw err;

View File

@ -23,15 +23,15 @@ describe('List parts', () => {
beforeEach(() => {
bucketUtil = new BucketUtility('default', sigCfg);
s3 = bucketUtil.s3;
return s3.createBucketPromise({ Bucket: bucket })
.then(() => s3.createMultipartUploadPromise({
Bucket: bucket, Key: key }))
return s3.createBucket({ Bucket: bucket }).promise()
.then(() => s3.createMultipartUpload({
Bucket: bucket, Key: key }).promise())
.then(res => {
uploadId = res.UploadId;
return s3.uploadPartPromise({ Bucket: bucket, Key: key,
PartNumber: 1, UploadId: uploadId, Body: bodyFirstPart });
}).then(() => s3.uploadPartPromise({ Bucket: bucket, Key: key,
PartNumber: 2, UploadId: uploadId, Body: bodySecondPart })
return s3.uploadPart({ Bucket: bucket, Key: key,
PartNumber: 1, UploadId: uploadId, Body: bodyFirstPart }).promise();
}).then(() => s3.uploadPart({ Bucket: bucket, Key: key,
PartNumber: 2, UploadId: uploadId, Body: bodySecondPart }).promise()
).then(res => {
secondEtag = res.ETag;
return secondEtag;
@ -44,9 +44,9 @@ describe('List parts', () => {
afterEach(() => {
process.stdout.write('Emptying bucket');
return s3.abortMultipartUploadPromise({
return s3.abortMultipartUpload({
Bucket: bucket, Key: key, UploadId: uploadId,
})
}).promise()
.then(() => bucketUtil.empty(bucket))
.then(() => {
process.stdout.write('Deleting bucket');
@ -80,13 +80,13 @@ describe('List parts', () => {
/* eslint-disable no-param-reassign */
function createPart(sigCfg, bucketUtil, s3, key) {
let uploadId;
return s3.createBucketPromise({ Bucket: bucket })
.then(() => s3.createMultipartUploadPromise({
Bucket: bucket, Key: key }))
return s3.createBucket({ Bucket: bucket }).promise()
.then(() => s3.createMultipartUpload({
Bucket: bucket, Key: key }).promise())
.then(res => {
uploadId = res.UploadId;
return s3.uploadPartPromise({ Bucket: bucket, Key: key,
PartNumber: 1, UploadId: uploadId, Body: bodyFirstPart });
return s3.uploadPart({ Bucket: bucket, Key: key,
PartNumber: 1, UploadId: uploadId, Body: bodyFirstPart }).promise();
})
.then(() => Promise.resolve(uploadId));
}
@ -94,9 +94,9 @@ function createPart(sigCfg, bucketUtil, s3, key) {
function deletePart(s3, bucketUtil, key, uploadId) {
process.stdout.write('Emptying bucket');
return s3.abortMultipartUploadPromise({
return s3.abortMultipartUpload({
Bucket: bucket, Key: key, UploadId: uploadId,
})
}).promise()
.then(() => bucketUtil.empty(bucket))
.then(() => {
process.stdout.write('Deleting bucket');

View File

@ -85,34 +85,34 @@ describe('aws-node-sdk test suite of listMultipartUploads', () =>
bucketUtil = new BucketUtility('default', sigCfg);
s3 = bucketUtil.s3;
return s3.createBucketPromise({ Bucket: bucket })
return s3.createBucket({ Bucket: bucket }).promise()
.then(() => bucketUtil.getOwner())
.then(res => {
// The owner of the bucket will also be the MPU upload owner.
data.displayName = res.DisplayName;
data.userId = res.ID;
})
.then(() => s3.createMultipartUploadPromise({
.then(() => s3.createMultipartUpload({
Bucket: bucket,
Key: objectKey,
}))
}).promise())
.then(res => {
data.uploadId = res.UploadId;
});
});
afterEach(() =>
s3.abortMultipartUploadPromise({
s3.abortMultipartUpload({
Bucket: bucket,
Key: objectKey,
UploadId: data.uploadId,
})
}).promise()
.then(() => bucketUtil.empty(bucket))
.then(() => bucketUtil.deleteOne(bucket))
);
it('should list ongoing multipart uploads', () =>
s3.listMultipartUploadsPromise({ Bucket: bucket })
s3.listMultipartUploads({ Bucket: bucket }).promise()
.then(res => checkValues(res, data))
);
@ -121,22 +121,22 @@ describe('aws-node-sdk test suite of listMultipartUploads', () =>
data.delimiter = 'test-delimiter';
data.maxUploads = 1;
return s3.listMultipartUploadsPromise({
return s3.listMultipartUploads({
Bucket: bucket,
Prefix: 'to',
Delimiter: 'test-delimiter',
MaxUploads: 1,
})
}).promise()
.then(res => checkValues(res, data));
});
it('should list 0 multipart uploads when MaxUploads is 0', () => {
data.maxUploads = 0;
return s3.listMultipartUploadsPromise({
return s3.listMultipartUploads({
Bucket: bucket,
MaxUploads: 0,
})
}).promise()
.then(res => checkValues(res, data));
});
})

View File

@ -61,7 +61,7 @@ describe('Multi-Object Delete Success', function success() {
signatureVersion: 'v4',
});
s3 = bucketUtil.s3;
return s3.createBucketPromise({ Bucket: bucketName })
return s3.createBucket({ Bucket: bucketName }).promise()
.catch(err => {
process.stdout.write(`Error creating bucket: ${err}\n`);
throw err;
@ -76,11 +76,11 @@ describe('Multi-Object Delete Success', function success() {
const putPromises = objects.map(key => {
const mustComplete = Math.max(0, queued.length - parallel + 1);
const result = Promise.some(queued, mustComplete).then(() =>
s3.putObjectPromise({
s3.putObject({
Bucket: bucketName,
Key: key,
Body: 'somebody',
})
}).promise()
);
queued.push(result);
return result;
@ -92,17 +92,17 @@ describe('Multi-Object Delete Success', function success() {
});
});
afterEach(() => s3.deleteBucketPromise({ Bucket: bucketName }));
afterEach(() => s3.deleteBucket({ Bucket: bucketName }).promise());
it('should batch delete 1000 objects', () => {
const objects = createObjectsList(1000);
return s3.deleteObjectsPromise({
return s3.deleteObjects({
Bucket: bucketName,
Delete: {
Objects: objects,
Quiet: false,
},
}).then(res => {
}).promise().then(res => {
assert.strictEqual(res.Deleted.length, 1000);
// order of returned objects not sorted
assert.deepStrictEqual(sortList(res.Deleted), sortList(objects));
@ -114,13 +114,13 @@ describe('Multi-Object Delete Success', function success() {
it('should batch delete 1000 objects quietly', () => {
const objects = createObjectsList(1000);
return s3.deleteObjectsPromise({
return s3.deleteObjects({
Bucket: bucketName,
Delete: {
Objects: objects,
Quiet: true,
},
}).then(res => {
}).promise().then(res => {
assert.strictEqual(res.Deleted.length, 0);
assert.strictEqual(res.Errors.length, 0);
}).catch(err => {
@ -137,24 +137,24 @@ describe('Multi-Object Delete Error Responses', () => {
beforeEach(() => {
bucketUtil = new BucketUtility('default', sigCfg);
s3 = bucketUtil.s3;
return s3.createBucketPromise({ Bucket: bucketName })
return s3.createBucket({ Bucket: bucketName }).promise()
.catch(err => {
process.stdout.write(`Error creating bucket: ${err}\n`);
throw err;
});
});
afterEach(() => s3.deleteBucketPromise({ Bucket: bucketName }));
afterEach(() => s3.deleteBucket({ Bucket: bucketName }).promise());
it('should return error if request deletion of more than 1000 objects',
() => {
const objects = createObjectsList(1001);
return s3.deleteObjectsPromise({
return s3.deleteObjects({
Bucket: bucketName,
Delete: {
Objects: objects,
},
}).catch(err => {
}).promise().catch(err => {
checkError(err, 'MalformedXML');
});
});
@ -162,12 +162,12 @@ describe('Multi-Object Delete Error Responses', () => {
it('should return error if request deletion of 0 objects',
() => {
const objects = createObjectsList(0);
return s3.deleteObjectsPromise({
return s3.deleteObjects({
Bucket: bucketName,
Delete: {
Objects: objects,
},
}).catch(err => {
}).promise().catch(err => {
checkError(err, 'MalformedXML');
});
});
@ -175,12 +175,12 @@ describe('Multi-Object Delete Error Responses', () => {
it('should return no error if try to delete non-existent objects',
() => {
const objects = createObjectsList(1000);
return s3.deleteObjectsPromise({
return s3.deleteObjects({
Bucket: bucketName,
Delete: {
Objects: objects,
},
}).then(res => {
}).promise().then(res => {
assert.strictEqual(res.Deleted.length, 1000);
assert.strictEqual(res.Errors.length, 0);
}).catch(err => {
@ -190,12 +190,12 @@ describe('Multi-Object Delete Error Responses', () => {
it('should return error if no such bucket', () => {
const objects = createObjectsList(1);
return s3.deleteObjectsPromise({
return s3.deleteObjects({
Bucket: 'nosuchbucket2323292093',
Delete: {
Objects: objects,
},
}).catch(err => {
}).promise().catch(err => {
checkError(err, 'NoSuchBucket');
});
});
@ -213,18 +213,18 @@ describe('Multi-Object Delete Access', function access() {
signatureVersion: 'v4',
});
s3 = bucketUtil.s3;
return s3.createBucketPromise({ Bucket: bucketName })
return s3.createBucket({ Bucket: bucketName }).promise()
.catch(err => {
process.stdout.write(`Error creating bucket: ${err}\n`);
throw err;
})
.then(() => {
for (let i = 1; i < 501; i++) {
createObjects.push(s3.putObjectPromise({
createObjects.push(s3.putObject({
Bucket: bucketName,
Key: `${key}${i}`,
Body: 'somebody',
}));
}).promise());
}
return Promise.all(createObjects)
.catch(err => {
@ -234,7 +234,7 @@ describe('Multi-Object Delete Access', function access() {
});
});
after(() => s3.deleteBucketPromise({ Bucket: bucketName }));
after(() => s3.deleteBucket({ Bucket: bucketName }).promise());
it('should return access denied error for each object where no acl ' +
'permission', () => {
@ -245,13 +245,13 @@ describe('Multi-Object Delete Access', function access() {
item.Code = 'AccessDenied';
item.Message = 'Access Denied';
});
return otherAccountS3.deleteObjectsPromise({
return otherAccountS3.deleteObjects({
Bucket: bucketName,
Delete: {
Objects: objects,
Quiet: false,
},
}).then(res => {
}).promise().then(res => {
assert.strictEqual(res.Deleted.length, 0);
assert.deepStrictEqual(sortList(res.Errors), sortList(errorList));
assert.strictEqual(res.Errors.length, 500);
@ -263,13 +263,13 @@ describe('Multi-Object Delete Access', function access() {
it('should batch delete objects where requester has permission', () => {
const objects = createObjectsList(500);
return s3.deleteObjectsPromise({
return s3.deleteObjects({
Bucket: bucketName,
Delete: {
Objects: objects,
Quiet: false,
},
}).then(res => {
}).promise().then(res => {
assert.strictEqual(res.Deleted.length, 500);
assert.strictEqual(res.Errors.length, 0);
}).catch(err => {
@ -289,11 +289,11 @@ describe('Multi-Object Delete with Object Lock', () => {
signatureVersion: 'v4',
});
s3 = bucketUtil.s3;
return s3.createBucketPromise({
return s3.createBucket({
Bucket: bucketName,
ObjectLockEnabledForBucket: true,
})
.then(() => s3.putObjectLockConfigurationPromise({
}).promise()
.then(() => s3.putObjectLockConfiguration({
Bucket: bucketName,
ObjectLockConfiguration: {
ObjectLockEnabled: 'Enabled',
@ -304,18 +304,18 @@ describe('Multi-Object Delete with Object Lock', () => {
},
},
},
}))
}).promise())
.catch(err => {
process.stdout.write(`Error creating bucket: ${err}\n`);
throw err;
})
.then(() => {
for (let i = 1; i < 6; i++) {
createObjects.push(s3.putObjectPromise({
createObjects.push(s3.putObject({
Bucket: bucketName,
Key: `${key}${i}`,
Body: 'somebody',
}));
}).promise());
}
return Promise.all(createObjects)
.then(res => {
@ -330,17 +330,17 @@ describe('Multi-Object Delete with Object Lock', () => {
});
});
after(() => s3.deleteBucketPromise({ Bucket: bucketName }));
after(() => s3.deleteBucket({ Bucket: bucketName }).promise());
it('should not delete locked objects', () => {
const objects = createObjectsList(5, versionIds);
return s3.deleteObjectsPromise({
return s3.deleteObjects({
Bucket: bucketName,
Delete: {
Objects: objects,
Quiet: false,
},
}).then(res => {
}).promise().then(res => {
assert.strictEqual(res.Errors.length, 5);
res.Errors.forEach(err => assert.strictEqual(err.Code, 'AccessDenied'));
});
@ -361,13 +361,13 @@ describe('Multi-Object Delete with Object Lock', () => {
date: moment().subtract(10, 'days').toISOString(),
};
return changeLockPromise(objectsCopy, newRetention)
.then(() => s3.deleteObjectsPromise({
.then(() => s3.deleteObjects({
Bucket: bucketName,
Delete: {
Objects: objects,
Quiet: false,
},
})).then(res => {
}).promise()).then(res => {
assert.strictEqual(res.Deleted.length, 5);
assert.strictEqual(res.Errors.length, 0);
}).catch(err => {

View File

@ -99,7 +99,7 @@ describe('Object Copy', () => {
});
});
beforeEach(() => s3.putObjectPromise({
beforeEach(() => s3.putObject({
Bucket: sourceBucketName,
Key: sourceObjName,
Body: content,
@ -109,13 +109,13 @@ describe('Object Copy', () => {
ContentEncoding: originalContentEncoding,
Expires: originalExpires,
Tagging: originalTagging,
}).then(res => {
}).promise().then(res => {
etag = res.ETag;
etagTrim = etag.substring(1, etag.length - 1);
return s3.headObjectPromise({
return s3.headObject({
Bucket: sourceBucketName,
Key: sourceObjName,
});
}).promise();
}).then(res => {
lastModified = res.LastModified;
}));
@ -1258,19 +1258,19 @@ describe('Object Copy with object lock enabled on both destination ' +
});
});
beforeEach(() => s3.putObjectPromise({
beforeEach(() => s3.putObject({
Bucket: sourceBucketName,
Key: sourceObjName,
Body: content,
Metadata: originalMetadata,
ObjectLockMode: 'GOVERNANCE',
ObjectLockRetainUntilDate: new Date(2050, 1, 1),
}).then(res => {
}).promise().then(res => {
versionId = res.VersionId;
s3.headObjectPromise({
return s3.headObject({
Bucket: sourceBucketName,
Key: sourceObjName,
});
}).promise();
}));
afterEach(() => bucketUtil.empty(sourceBucketName)

View File

@ -64,15 +64,15 @@ describe('HEAD object, conditions', () => {
}, fields), cb);
}
beforeEach(() => s3.putObjectPromise({
beforeEach(() => s3.putObject({
Bucket: bucketName,
Key: objectName,
Body: 'I am the best content ever',
}).then(res => {
}).promise().then(res => {
etag = res.ETag;
etagTrim = etag.substring(1, etag.length - 1);
return s3.headObjectPromise(
{ Bucket: bucketName, Key: objectName });
return s3.headObject(
{ Bucket: bucketName, Key: objectName }).promise();
}).then(res => {
lastModified = res.LastModified;
}));
@ -554,10 +554,10 @@ describe('HEAD object with object lock', () => {
ObjectLockMode: mockMode,
ObjectLockLegalHoldStatus: 'ON',
};
return s3.createBucketPromise(
{ Bucket: bucket, ObjectLockEnabledForBucket: true })
.then(() => s3.putObjectPromise(params))
.then(() => s3.getObjectPromise({ Bucket: bucket, Key: key }))
return s3.createBucket(
{ Bucket: bucket, ObjectLockEnabledForBucket: true }).promise()
.then(() => s3.putObject(params).promise())
.then(() => s3.getObject({ Bucket: bucket, Key: key }).promise())
/* eslint-disable no-return-assign */
.then(res => versionId = res.VersionId)
.catch(err => {
@ -567,7 +567,7 @@ describe('HEAD object with object lock', () => {
});
afterEach(() => changeLockPromise([{ bucket, key, versionId }], '')
.then(() => s3.listObjectVersionsPromise({ Bucket: bucket }))
.then(() => s3.listObjectVersions({ Bucket: bucket }).promise())
.then(res => res.Versions.forEach(object => {
const params = [
{
@ -582,7 +582,7 @@ describe('HEAD object with object lock', () => {
process.stdout.write('Emptying and deleting buckets\n');
return bucketUtil.empty(bucket);
})
.then(() => s3.deleteBucketPromise({ Bucket: bucket }))
.then(() => s3.deleteBucket({ Bucket: bucket }).promise())
.catch(err => {
process.stdout.write('Error in afterEach');
throw err;

View File

@ -40,7 +40,7 @@ describe('HEAD object, compatibility headers [Cache-Control, ' +
ContentEncoding: contentEncoding,
Expires: expires,
};
return s3.putObjectPromise(params);
return s3.putObject(params).promise();
})
.catch(err => {
process.stdout.write(`Error with putObject: ${err}\n`);

View File

@ -32,13 +32,13 @@ describe('Put object with same key as prior object', () => {
.catch(done);
});
beforeEach(() => s3.putObjectPromise({
beforeEach(() => s3.putObject({
Bucket: bucketName,
Key: objectName,
Body: 'I am the best content ever',
Metadata: firstPutMetadata,
}).then(() =>
s3.headObjectPromise({ Bucket: bucketName, Key: objectName })
}).promise().then(() =>
s3.headObject({ Bucket: bucketName, Key: objectName }).promise()
).then(res => {
assert.deepStrictEqual(res.Metadata, firstPutMetadata);
}));
@ -48,13 +48,13 @@ describe('Put object with same key as prior object', () => {
after(() => bucketUtil.deleteOne(bucketName));
it('should overwrite all user metadata and data on overwrite put',
() => s3.putObjectPromise({
() => s3.putObject({
Bucket: bucketName,
Key: objectName,
Body: 'Much different',
Metadata: secondPutMetadata,
}).then(() =>
s3.getObjectPromise({ Bucket: bucketName, Key: objectName })
}).promise().then(() =>
s3.getObject({ Bucket: bucketName, Key: objectName }).promise()
).then(res => {
assert.deepStrictEqual(res.Metadata, secondPutMetadata);
assert.deepStrictEqual(res.Body.toString(),

View File

@ -26,7 +26,7 @@ describe('PUT object', () => {
beforeEach(() => {
bucketUtil = new BucketUtility('default', sigCfg);
s3 = bucketUtil.s3;
return s3.createBucketPromise({ Bucket: bucket })
return s3.createBucket({ Bucket: bucket }).promise()
.catch(err => {
process.stdout.write(`Error creating bucket: ${err}\n`);
throw err;
@ -278,10 +278,10 @@ describe('PUT object with object lock', () => {
beforeEach(() => {
bucketUtil = new BucketUtility('default', sigCfg);
s3 = bucketUtil.s3;
return s3.createBucketPromise({
return s3.createBucket({
Bucket: bucket,
ObjectLockEnabledForBucket: true,
})
}).promise()
.catch(err => {
process.stdout.write(`Error creating bucket: ${err}\n`);
throw err;

View File

@ -73,9 +73,9 @@ describe('PUT Object ACL', () => {
];
Promise
.mapSeries(objects, param => s3.putObjectPromise(param))
.then(() => s3.putObjectAclPromise({ Bucket, Key,
ACL: 'public-read' }))
.mapSeries(objects, param => s3.putObject(param).promise())
.then(() => s3.putObjectAcl({ Bucket, Key,
ACL: 'public-read' }).promise())
.then(data => {
assert(data);
done();

View File

@ -41,11 +41,11 @@ describe('PUT object legal hold', () => {
beforeEach(() => {
process.stdout.write('Putting buckets and objects\n');
return s3.createBucketPromise(
{ Bucket: bucket, ObjectLockEnabledForBucket: true })
.then(() => s3.createBucketPromise({ Bucket: unlockedBucket }))
.then(() => s3.putObjectPromise({ Bucket: unlockedBucket, Key: key }))
.then(() => s3.putObjectPromise({ Bucket: bucket, Key: key }))
return s3.createBucket(
{ Bucket: bucket, ObjectLockEnabledForBucket: true }).promise()
.then(() => s3.createBucket({ Bucket: unlockedBucket }).promise())
.then(() => s3.putObject({ Bucket: unlockedBucket, Key: key }).promise())
.then(() => s3.putObject({ Bucket: bucket, Key: key }).promise())
.then(res => {
versionId = res.VersionId;
})

View File

@ -15,9 +15,9 @@ describe('PUT object', () => {
beforeEach(() => {
bucketUtil = new BucketUtility('default', sigCfg);
s3 = bucketUtil.s3;
return s3.createBucketPromise({ Bucket: bucket })
.then(() => s3.createMultipartUploadPromise({
Bucket: bucket, Key: key }))
return s3.createBucket({ Bucket: bucket }).promise()
.then(() => s3.createMultipartUpload({
Bucket: bucket, Key: key }).promise())
.then(res => {
uploadId = res.UploadId;
return uploadId;
@ -30,9 +30,9 @@ describe('PUT object', () => {
afterEach(() => {
process.stdout.write('Emptying bucket');
return s3.abortMultipartUploadPromise({
return s3.abortMultipartUpload({
Bucket: bucket, Key: key, UploadId: uploadId,
})
}).promise()
.then(() => bucketUtil.empty(bucket))
.then(() => {
process.stdout.write('Deleting bucket');

View File

@ -25,11 +25,11 @@ describe('PUT object retention', () => {
beforeEach(() => {
process.stdout.write('Putting buckets and objects\n');
return s3.createBucketPromise(
{ Bucket: bucketName, ObjectLockEnabledForBucket: true })
.then(() => s3.createBucketPromise({ Bucket: unlockedBucket }))
.then(() => s3.putObjectPromise({ Bucket: unlockedBucket, Key: objectName }))
.then(() => s3.putObjectPromise({ Bucket: bucketName, Key: objectName }))
return s3.createBucket(
{ Bucket: bucketName, ObjectLockEnabledForBucket: true }).promise()
.then(() => s3.createBucket({ Bucket: unlockedBucket }).promise())
.then(() => s3.putObject({ Bucket: unlockedBucket, Key: objectName }).promise())
.then(() => s3.putObject({ Bucket: bucketName, Key: objectName }).promise())
.then(res => {
versionId = res.VersionId;
})

View File

@ -35,11 +35,11 @@ function getOuterRange(range, bytes) {
// Get the ranged object from a bucket. Write the response body to a file, then
// use getRangeExec to check that all the bytes are in the correct location.
function checkRanges(range, bytes) {
return s3.getObjectPromise({
return s3.getObject({
Bucket: bucket,
Key: key,
Range: `bytes=${range}`,
})
}).promise()
.then(res => {
const { begin, end } = getOuterRange(range, bytes);
const total = (end - begin) + 1;
@ -68,13 +68,13 @@ function uploadParts(bytes, uploadId) {
return Promise.map([1, 2], part =>
execFileAsync('dd', [`if=${name}`, `of=${name}.mpuPart${part}`,
'bs=5242880', `skip=${part - 1}`, 'count=1'])
.then(() => s3.uploadPartPromise({
.then(() => s3.uploadPart({
Bucket: bucket,
Key: key,
PartNumber: part,
UploadId: uploadId,
Body: createReadStream(`${name}.mpuPart${part}`),
}))
}).promise())
);
}
@ -97,17 +97,17 @@ describe('aws-node-sdk range tests', () => {
let uploadId;
beforeEach(() =>
s3.createBucketPromise({ Bucket: bucket })
.then(() => s3.createMultipartUploadPromise({
s3.createBucket({ Bucket: bucket }).promise()
.then(() => s3.createMultipartUpload({
Bucket: bucket,
Key: key,
}))
}).promise())
.then(res => {
uploadId = res.UploadId;
})
.then(() => createHashedFile(fileSize))
.then(() => uploadParts(fileSize, uploadId))
.then(res => s3.completeMultipartUploadPromise({
.then(res => s3.completeMultipartUpload({
Bucket: bucket,
Key: key,
UploadId: uploadId,
@ -123,15 +123,15 @@ describe('aws-node-sdk range tests', () => {
},
],
},
}))
}).promise())
);
afterEach(() => bucketUtil.empty(bucket)
.then(() => s3.abortMultipartUploadPromise({
.then(() => s3.abortMultipartUpload({
Bucket: bucket,
Key: key,
UploadId: uploadId,
}))
}).promise())
.catch(err => new Promise((resolve, reject) => {
if (err.code !== 'NoSuchUpload') {
reject(err);
@ -164,13 +164,13 @@ describe('aws-node-sdk range tests', () => {
const fileSize = 2000;
beforeEach(() =>
s3.createBucketPromise({ Bucket: bucket })
s3.createBucket({ Bucket: bucket }).promise()
.then(() => createHashedFile(fileSize))
.then(() => s3.putObjectPromise({
.then(() => s3.putObject({
Bucket: bucket,
Key: key,
Body: createReadStream(`hashedFile.${fileSize}`),
})));
}).promise()));
afterEach(() =>
bucketUtil.empty(bucket)
@ -221,13 +221,13 @@ describe('aws-node-sdk range tests', () => {
const fileSize = 2900;
beforeEach(() =>
s3.createBucketPromise({ Bucket: bucket })
s3.createBucket({ Bucket: bucket }).promise()
.then(() => createHashedFile(fileSize))
.then(() => s3.putObjectPromise({
.then(() => s3.putObject({
Bucket: bucket,
Key: key,
Body: createReadStream(`hashedFile.${fileSize}`),
})));
}).promise()));
afterEach(() =>
bucketUtil.empty(bucket)
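The range-test fixture above chains several of these calls: create the multipart upload, upload two 5 MiB parts cut with dd, complete the upload, and abort it in afterEach if needed. A condensed sketch of that flow in the new call style (the helper name and part bodies are illustrative, not taken from the test):

const { S3 } = require('aws-sdk');
const s3 = new S3({ signatureVersion: 'v4' });

async function uploadInParts(bucket, key, partBodies) {
    // Start the multipart upload and keep its UploadId for the later calls.
    const { UploadId } = await s3.createMultipartUpload({ Bucket: bucket, Key: key }).promise();

    // Upload each part; S3 requires every part except the last to be at least 5 MiB.
    const parts = await Promise.all(partBodies.map((Body, i) =>
        s3.uploadPart({ Bucket: bucket, Key: key, UploadId, PartNumber: i + 1, Body })
            .promise()
            .then(res => ({ ETag: res.ETag, PartNumber: i + 1 }))));

    // Stitch the parts together; on failure, callers should abortMultipartUpload.
    return s3.completeMultipartUpload({
        Bucket: bucket,
        Key: key,
        UploadId,
        MultipartUpload: { Parts: parts },
    }).promise();
}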

View File

@ -34,21 +34,21 @@ describe('User visits bucket website endpoint and requests resource ' +
describe('when x-amz-website-redirect-location: /redirect.html', () => {
beforeEach(() => {
const webConfig = new WebsiteConfigTester('index.html');
return s3.putBucketWebsitePromise({ Bucket: bucket,
WebsiteConfiguration: webConfig })
.then(() => s3.putObjectPromise({ Bucket: bucket,
return s3.putBucketWebsite({ Bucket: bucket,
WebsiteConfiguration: webConfig }).promise()
.then(() => s3.putObject({ Bucket: bucket,
Key: 'index.html',
ACL: 'public-read',
Body: fs.readFileSync(path.join(__dirname,
'/websiteFiles/index.html')),
ContentType: 'text/html',
WebsiteRedirectLocation: '/redirect.html' }))
.then(() => s3.putObjectPromise({ Bucket: bucket,
WebsiteRedirectLocation: '/redirect.html' }).promise())
.then(() => s3.putObject({ Bucket: bucket,
Key: 'redirect.html',
ACL: 'public-read',
Body: fs.readFileSync(path.join(__dirname,
'/websiteFiles/redirect.html')),
ContentType: 'text/html' }));
ContentType: 'text/html' }).promise());
});
afterEach(() => bucketUtil.empty(bucket));
@ -76,15 +76,15 @@ describe('User visits bucket website endpoint and requests resource ' +
() => {
beforeEach(() => {
const webConfig = new WebsiteConfigTester('index.html');
return s3.putBucketWebsitePromise({ Bucket: bucket,
WebsiteConfiguration: webConfig })
.then(() => s3.putObjectPromise({ Bucket: bucket,
return s3.putBucketWebsite({ Bucket: bucket,
WebsiteConfiguration: webConfig }).promise()
.then(() => s3.putObject({ Bucket: bucket,
Key: 'index.html',
ACL: 'public-read',
Body: fs.readFileSync(path.join(__dirname,
'/websiteFiles/index.html')),
ContentType: 'text/html',
WebsiteRedirectLocation: 'https://www.google.com' }));
WebsiteRedirectLocation: 'https://www.google.com' }).promise());
});
afterEach(() => bucketUtil.empty(bucket));
@ -112,14 +112,14 @@ describe('User visits bucket website endpoint and requests resource ' +
describe('when key with header is private', () => {
beforeEach(() => {
const webConfig = new WebsiteConfigTester('index.html');
return s3.putBucketWebsitePromise({ Bucket: bucket,
WebsiteConfiguration: webConfig })
.then(() => s3.putObjectPromise({ Bucket: bucket,
return s3.putBucketWebsite({ Bucket: bucket,
WebsiteConfiguration: webConfig }).promise()
.then(() => s3.putObject({ Bucket: bucket,
Key: 'index.html',
Body: fs.readFileSync(path.join(__dirname,
'/websiteFiles/index.html')),
ContentType: 'text/html',
WebsiteRedirectLocation: 'https://www.google.com' }));
WebsiteRedirectLocation: 'https://www.google.com' }).promise());
});
afterEach(() => bucketUtil.empty(bucket));
@ -154,20 +154,20 @@ describe('User visits bucket website endpoint and requests resource ' +
HostName: 'www.google.com',
};
webConfig.addRoutingRule(redirect, condition);
return s3.putBucketWebsitePromise({ Bucket: bucket,
WebsiteConfiguration: webConfig })
.then(() => s3.putObjectPromise({ Bucket: bucket,
return s3.putBucketWebsite({ Bucket: bucket,
WebsiteConfiguration: webConfig }).promise()
.then(() => s3.putObject({ Bucket: bucket,
Key: 'index.html',
Body: fs.readFileSync(path.join(__dirname,
'/websiteFiles/index.html')),
ContentType: 'text/html',
WebsiteRedirectLocation: '/redirect.html' }))
.then(() => s3.putObjectPromise({ Bucket: bucket,
WebsiteRedirectLocation: '/redirect.html' }).promise())
.then(() => s3.putObject({ Bucket: bucket,
Key: 'redirect.html',
ACL: 'public-read',
Body: fs.readFileSync(path.join(__dirname,
'/websiteFiles/redirect.html')),
ContentType: 'text/html' }));
ContentType: 'text/html' }).promise());
});
afterEach(() => bucketUtil.empty(bucket));
@ -203,15 +203,15 @@ describe('User visits bucket website endpoint and requests resource ' +
};
const webConfig = new WebsiteConfigTester(null, null,
redirectAllTo);
return s3.putBucketWebsitePromise({ Bucket: bucket,
WebsiteConfiguration: webConfig })
.then(() => s3.putObjectPromise({ Bucket: bucket,
return s3.putBucketWebsite({ Bucket: bucket,
WebsiteConfiguration: webConfig }).promise()
.then(() => s3.putObject({ Bucket: bucket,
Key: 'index.html',
ACL: 'public-read',
Body: fs.readFileSync(path.join(__dirname,
'/websiteFiles/index.html')),
ContentType: 'text/html',
WebsiteRedirectLocation: '/redirect.html' }));
WebsiteRedirectLocation: '/redirect.html' }).promise());
});
afterEach(() => bucketUtil.empty(bucket));
@ -250,15 +250,15 @@ describe('User visits bucket website endpoint and requests resource ' +
HostName: 'www.google.com',
};
webConfig.addRoutingRule(redirect, condition);
return s3.putBucketWebsitePromise({ Bucket: bucket,
WebsiteConfiguration: webConfig })
.then(() => s3.putObjectPromise({ Bucket: bucket,
return s3.putBucketWebsite({ Bucket: bucket,
WebsiteConfiguration: webConfig }).promise()
.then(() => s3.putObject({ Bucket: bucket,
Key: 'about/index.html',
ACL: 'public-read',
Body: fs.readFileSync(path.join(__dirname,
'/websiteFiles/index.html')),
ContentType: 'text/html',
WebsiteRedirectLocation: '/redirect.html' }));
WebsiteRedirectLocation: '/redirect.html' }).promise());
});
afterEach(() => bucketUtil.empty(bucket));
@ -296,21 +296,21 @@ describe('User visits bucket website endpoint and requests resource ' +
ReplaceKeyWith: 'redirect.html',
};
webConfig.addRoutingRule(redirect, condition);
return s3.putBucketWebsitePromise({ Bucket: bucket,
WebsiteConfiguration: webConfig })
.then(() => s3.putObjectPromise({ Bucket: bucket,
return s3.putBucketWebsite({ Bucket: bucket,
WebsiteConfiguration: webConfig }).promise()
.then(() => s3.putObject({ Bucket: bucket,
Key: 'index.html',
ACL: 'public-read',
Body: fs.readFileSync(path.join(__dirname,
'/websiteFiles/index.html')),
ContentType: 'text/html',
WebsiteRedirectLocation: 'https://www.google.com' }))
.then(() => s3.putObjectPromise({ Bucket: bucket,
WebsiteRedirectLocation: 'https://www.google.com' }).promise())
.then(() => s3.putObject({ Bucket: bucket,
Key: 'redirect.html',
ACL: 'public-read',
Body: fs.readFileSync(path.join(__dirname,
'/websiteFiles/redirect.html')),
ContentType: 'text/html' }));
ContentType: 'text/html' }).promise());
});
afterEach(() => bucketUtil.empty(bucket));

View File

@ -1,6 +1,6 @@
const assert = require('assert');
const tv4 = require('tv4');
const Promise = require('bluebird');
const bluebird = require('bluebird');
const async = require('async');
const { S3 } = require('aws-sdk');
@ -153,7 +153,7 @@ describeFn('GET Service - AWS.S3.listBuckets', function getService() {
it('should list buckets', done => {
s3
.listBucketsPromise()
.listBuckets().promise()
.then(data => {
const isValidResponse = tv4.validate(data, svcSchema);
if (!isValidResponse) {
@ -196,13 +196,13 @@ describeFn('GET Service - AWS.S3.listBuckets', function getService() {
let anotherS3;
before(() => {
anotherS3 = Promise.promisifyAll(new S3(getConfig('lisa')),
{ suffix: 'Promise' });
anotherS3 = new S3(getConfig('lisa'));
anotherS3.config.setPromisesDependency(bluebird);
});
it('should not return other accounts bucket list', done => {
anotherS3
.listBucketsPromise()
.listBuckets().promise()
.then(data => {
const hasSameBuckets = data.Buckets
.filter(filterFn)

View File

@ -29,7 +29,7 @@ describe('aws-node-sdk test delete bucket', () => {
async.waterfall([
next => s3.createBucket({ Bucket: bucketName },
err => next(err)),
next => s3.putBucketVersioningPromise({
next => s3.putBucketVersioning({
Bucket: bucketName,
VersioningConfiguration: {
Status: 'Enabled',

View File

@ -94,13 +94,13 @@ describe('Multi-Object Versioning Delete Success', function success() {
it('should batch delete 1000 objects quietly', () => {
const objects = objectsRes.slice(0, 1000).map(obj =>
({ Key: obj.Key, VersionId: obj.VersionId }));
return s3.deleteObjectsPromise({
return s3.deleteObjects({
Bucket: bucketName,
Delete: {
Objects: objects,
Quiet: true,
},
}).then(res => {
}).promise().then(res => {
assert.strictEqual(res.Deleted.length, 0);
assert.strictEqual(res.Errors.length, 0);
}).catch(err => {
@ -111,13 +111,13 @@ describe('Multi-Object Versioning Delete Success', function success() {
it('should batch delete 1000 objects', () => {
const objects = objectsRes.slice(0, 1000).map(obj =>
({ Key: obj.Key, VersionId: obj.VersionId }));
return s3.deleteObjectsPromise({
return s3.deleteObjects({
Bucket: bucketName,
Delete: {
Objects: objects,
Quiet: false,
},
}).then(res => {
}).promise().then(res => {
assert.strictEqual(res.Deleted.length, 1000);
// order of returned objects not sorted
assert.deepStrictEqual(sortList(res.Deleted),
@ -133,12 +133,12 @@ describe('Multi-Object Versioning Delete Success', function success() {
const objects = objectsRes.slice(0, 1000).map(obj =>
({ Key: obj.Key, VersionId: obj.VersionId }));
objects[0].VersionId = 'invalid-version-id';
return s3.deleteObjectsPromise({
return s3.deleteObjects({
Bucket: bucketName,
Delete: {
Objects: objects,
},
}).then(res => {
}).promise().then(res => {
assert.strictEqual(res.Deleted.length, 999);
assert.strictEqual(res.Errors.length, 1);
assert.strictEqual(res.Errors[0].Code, 'NoSuchVersion');
@ -153,12 +153,12 @@ describe('Multi-Object Versioning Delete Success', function success() {
const objects = objectsRes.slice(0, 1000).map(obj =>
({ Key: obj.Key, VersionId: obj.VersionId }));
objects[0].VersionId = nonExistingId;
return s3.deleteObjectsPromise({
return s3.deleteObjects({
Bucket: bucketName,
Delete: {
Objects: objects,
},
}).then(res => {
}).promise().then(res => {
assert.strictEqual(res.Deleted.length, 1000);
assert.strictEqual(res.Errors.length, 0);
const foundVersionId = res.Deleted.find(entry =>
@ -183,7 +183,7 @@ describe('Multi-Object Versioning Delete - deleting delete marker',
async.waterfall([
next => s3.createBucket({ Bucket: bucketName },
err => next(err)),
next => s3.putBucketVersioningPromise({
next => s3.putBucketVersioning({
Bucket: bucketName,
VersioningConfiguration: {
Status: 'Enabled',

View File

@ -88,11 +88,11 @@ describe('Object Version Copy', () => {
beforeEach(() => bucketUtil.createOne(sourceBucketName)
.then(() => bucketUtil.createOne(destBucketName))
.then(() => s3.putBucketVersioningPromise({
.then(() => s3.putBucketVersioning({
Bucket: sourceBucketName,
VersioningConfiguration: { Status: 'Enabled' },
}))
.then(() => s3.putObjectPromise({
}).promise())
.then(() => s3.putObject({
Bucket: sourceBucketName,
Key: sourceObjName,
Body: content,
@ -102,22 +102,22 @@ describe('Object Version Copy', () => {
ContentEncoding: originalContentEncoding,
Expires: originalExpires,
Tagging: originalTagging,
})).then(res => {
}).promise()).then(res => {
etag = res.ETag;
versionId = res.VersionId;
copySource = `${sourceBucketName}/${sourceObjName}` +
`?versionId=${versionId}`;
etagTrim = etag.substring(1, etag.length - 1);
copySourceVersionId = res.VersionId;
return s3.headObjectPromise({
return s3.headObject({
Bucket: sourceBucketName,
Key: sourceObjName,
});
}).promise();
}).then(res => {
lastModified = res.LastModified;
}).then(() => s3.putObjectPromise({ Bucket: sourceBucketName,
}).then(() => s3.putObject({ Bucket: sourceBucketName,
Key: sourceObjName,
Body: secondContent }))
Body: secondContent }).promise())
);
afterEach(done => async.parallel([

View File

@ -50,7 +50,7 @@ describe('Versioning on a replication source bucket', () => {
beforeEach(done => {
async.waterfall([
cb => s3.createBucket({ Bucket: bucketName }, e => cb(e)),
cb => s3.putBucketVersioningPromise({
cb => s3.putBucketVersioning({
Bucket: bucketName,
VersioningConfiguration: {
Status: 'Enabled',

View File

@ -111,13 +111,13 @@ describeSkipIfAWS('backbeat routes', () => {
bucketUtil = new BucketUtility(
'default', { signatureVersion: 'v4' });
s3 = bucketUtil.s3;
s3.createBucketPromise({ Bucket: TEST_BUCKET })
.then(() => s3.putBucketVersioningPromise(
s3.createBucket({ Bucket: TEST_BUCKET }).promise()
.then(() => s3.putBucketVersioning(
{
Bucket: TEST_BUCKET,
VersioningConfiguration: { Status: 'Enabled' },
}))
.then(() => s3.createBucketPromise({ Bucket: NONVERSIONED_BUCKET }))
}).promise())
.then(() => s3.createBucket({ Bucket: NONVERSIONED_BUCKET }).promise())
.then(() => done())
.catch(err => {
process.stdout.write(`Error creating bucket: ${err}\n`);
@ -126,8 +126,8 @@ describeSkipIfAWS('backbeat routes', () => {
});
after(done => {
bucketUtil.empty(TEST_BUCKET)
.then(() => s3.deleteBucketPromise({ Bucket: TEST_BUCKET }))
.then(() => s3.deleteBucketPromise({ Bucket: NONVERSIONED_BUCKET }))
.then(() => s3.deleteBucket({ Bucket: TEST_BUCKET }).promise())
.then(() => s3.deleteBucket({ Bucket: NONVERSIONED_BUCKET }).promise())
.then(() => done());
});

View File

135 yarn.lock
View File

@ -466,6 +466,15 @@ agent-base@~4.2.1:
dependencies:
es6-promisify "^5.0.0"
agentkeepalive@^4.1.3:
version "4.1.3"
resolved "https://registry.yarnpkg.com/agentkeepalive/-/agentkeepalive-4.1.3.tgz#360a09d743a1f4fde749f9ba07caa6575d08259a"
integrity sha512-wn8fw19xKZwdGPO47jivonaHRTd+nGOMP1z11sgGeQzDy2xd5FG0R67dIMcKHDE2cJ5y+YXV30XVGUBPRSY7Hg==
dependencies:
debug "^4.1.0"
depd "^1.1.2"
humanize-ms "^1.2.1"
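agentkeepalive arrives via the updated Arsenal dependency; judging by the branch name (econnreset-rest-client-keep-alive) it backs a keep-alive HTTP agent, though Arsenal's actual wiring is not shown in this diff. A generic sketch of how the package is typically used:

const Agent = require('agentkeepalive');
const http = require('http');

// Reuse sockets across requests instead of opening a new TCP connection each
// time; keeping freeSocketTimeout below the server's own keep-alive timeout
// helps avoid ECONNRESET on sockets the server has already closed.
const keepaliveAgent = new Agent({
    maxSockets: 100,
    maxFreeSockets: 10,
    timeout: 60000,           // active socket timeout
    freeSocketTimeout: 30000, // idle socket timeout
});

http.get({ host: 'localhost', port: 8000, path: '/', agent: keepaliveAgent },
    res => res.resume());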
ajv-keywords@^1.0.0:
version "1.5.1"
resolved "https://registry.yarnpkg.com/ajv-keywords/-/ajv-keywords-1.5.1.tgz#314dd0a4b3368fad3dfcdc54ede6171b886daf3c"
@ -614,12 +623,13 @@ arraybuffer.slice@~0.0.7:
resolved "https://registry.yarnpkg.com/arraybuffer.slice/-/arraybuffer.slice-0.0.7.tgz#3bbc4275dd584cc1b10809b89d4e8b63a69e7675"
integrity sha512-wGUIVQXuehL5TCqQun8OW81jGzAWycqzFF8lFp+GOM5BXLYj3bKNsYC4daB7n6XjCqxQA/qgTJ+8ANR3acjrog==
"arsenal@github:scality/Arsenal#26a00ba":
"arsenal@github:scality/Arsenal#w/7.9/bugfix/S3C-2201-econnreset-rest-client-keep-alive":
version "7.7.0"
resolved "https://codeload.github.com/scality/Arsenal/tar.gz/26a00babb422403691c7b5c6fff31d24cc721388"
resolved "https://codeload.github.com/scality/Arsenal/tar.gz/b8bef65f002f2d396747b736fa7bd6b8c87d0e7b"
dependencies:
"@hapi/joi" "^15.1.0"
JSONStream "^1.0.0"
agentkeepalive "^4.1.3"
ajv "6.12.2"
async "~2.1.5"
debug "~2.6.9"
@ -639,81 +649,6 @@ arraybuffer.slice@~0.0.7:
optionalDependencies:
ioctl "2.0.0"
arsenal@scality/Arsenal#32c895b:
version "7.4.3"
resolved "https://codeload.github.com/scality/Arsenal/tar.gz/32c895b21a31eb67dacc6e76d7f58b8142bf3ad1"
dependencies:
"@hapi/joi" "^15.1.0"
JSONStream "^1.0.0"
ajv "4.10.0"
async "~2.1.5"
debug "~2.3.3"
diskusage "^1.1.1"
ioredis "4.9.5"
ipaddr.js "1.2.0"
level "~5.0.1"
level-sublevel "~6.6.5"
node-forge "^0.7.1"
simple-glob "^0.1"
socket.io "~1.7.3"
socket.io-client "~1.7.3"
utf8 "2.1.2"
uuid "^3.0.1"
werelogs scality/werelogs#0ff7ec82
xml2js "~0.4.16"
optionalDependencies:
ioctl "2.0.0"
arsenal@scality/Arsenal#9f2e74e:
version "7.4.3"
resolved "https://codeload.github.com/scality/Arsenal/tar.gz/9f2e74ec6972527c2a9ca6ecb4155618f123fc19"
dependencies:
JSONStream "^1.0.0"
ajv "4.10.0"
async "~2.1.5"
debug "~2.3.3"
diskusage "^1.1.1"
ioredis "4.9.5"
ipaddr.js "1.2.0"
joi "^10.6"
level "~5.0.1"
level-sublevel "~6.6.5"
node-forge "^0.7.1"
simple-glob "^0.1"
socket.io "~1.7.3"
socket.io-client "~1.7.3"
utf8 "2.1.2"
uuid "^3.0.1"
werelogs scality/werelogs#0ff7ec82
xml2js "~0.4.16"
optionalDependencies:
ioctl "2.0.0"
arsenal@scality/Arsenal#b03f5b8:
version "7.4.3"
resolved "https://codeload.github.com/scality/Arsenal/tar.gz/b03f5b80acd6acaaf8dd2d49cd955e6b95279ab3"
dependencies:
JSONStream "^1.0.0"
ajv "4.10.0"
async "~2.1.5"
debug "~2.3.3"
diskusage "^1.1.1"
ioredis "4.9.5"
ipaddr.js "1.2.0"
joi "^10.6"
level "~5.0.1"
level-sublevel "~6.6.5"
node-forge "^0.7.1"
simple-glob "^0.1"
socket.io "~1.7.3"
socket.io-client "~1.7.3"
utf8 "2.1.2"
uuid "^3.0.1"
werelogs scality/werelogs#0ff7ec82
xml2js "~0.4.16"
optionalDependencies:
ioctl "2.0.0"
asap@~2.0.3:
version "2.0.6"
resolved "https://registry.yarnpkg.com/asap/-/asap-2.0.6.tgz#e50347611d7e690943208bbdafebcbc2fb866d46"
@ -789,19 +724,19 @@ asynckit@^0.4.0:
resolved "https://registry.yarnpkg.com/asynckit/-/asynckit-0.4.0.tgz#c79ed97f7f34cb8f2ba1bc9790bcc366474b4b79"
integrity sha1-x57Zf380y48robyXkLzDZkdLS3k=
aws-sdk@2.363.0:
version "2.363.0"
resolved "https://registry.yarnpkg.com/aws-sdk/-/aws-sdk-2.363.0.tgz#6d366a78d5b008fd927d6ff24815d39d78b54778"
integrity sha512-kQOfjzCEllH45OFN0z3fvhpSWDFWu19715A7TztHx6IEWKwwIEyd3b2XhTZtQLJrI1Giv7iGALwH46gybH9HJw==
aws-sdk@2.831.0:
version "2.831.0"
resolved "https://registry.yarnpkg.com/aws-sdk/-/aws-sdk-2.831.0.tgz#02607cc911a2136e5aabe624c1282e821830aef2"
integrity sha512-lrOjbGFpjk2xpESyUx2PGsTZgptCy5xycZazPeakNbFO19cOoxjHx3xyxOHsMCYb3pQwns35UvChQT60B4u6cw==
dependencies:
buffer "4.9.1"
buffer "4.9.2"
events "1.1.1"
ieee754 "1.1.8"
ieee754 "1.1.13"
jmespath "0.15.0"
querystring "0.2.0"
sax "1.2.1"
url "0.10.3"
uuid "3.1.0"
uuid "3.3.2"
xml2js "0.4.19"
aws-sdk@^2.2.23:
@ -1095,15 +1030,6 @@ buffer-from@^1.0.0:
resolved "https://registry.yarnpkg.com/buffer-from/-/buffer-from-1.1.1.tgz#32713bc028f75c02fdb710d7c7bcec1f2c6070ef"
integrity sha512-MQcXEUbCKtEo7bhqEs6560Hyd4XaovZlO/k9V3hjVUF/zwW7KBVdSK4gIt/bzwS9MbR5qob+F5jusZsb0YQK2A==
buffer@4.9.1:
version "4.9.1"
resolved "https://registry.yarnpkg.com/buffer/-/buffer-4.9.1.tgz#6d1bb601b07a4efced97094132093027c95bc298"
integrity sha1-bRu2AbB6TvztlwlBMgkwJ8lbwpg=
dependencies:
base64-js "^1.0.2"
ieee754 "^1.1.4"
isarray "^1.0.0"
buffer@4.9.2:
version "4.9.2"
resolved "https://registry.yarnpkg.com/buffer/-/buffer-4.9.2.tgz#230ead344002988644841ab0244af8c44bbe3ef8"
@ -1605,7 +1531,7 @@ debug@3.1.0, debug@=3.1.0, debug@~3.1.0:
dependencies:
ms "2.0.0"
debug@4, debug@^4.1.1, debug@^4.2.0:
debug@4, debug@^4.1.0, debug@^4.1.1, debug@^4.2.0:
version "4.3.1"
resolved "https://registry.yarnpkg.com/debug/-/debug-4.3.1.tgz#f0d229c505e0c6d8c49ac553d1b13dc183f6b2ee"
integrity sha512-doEwdvm4PCeK4K3RQN2ZC2BYUBaxwLARCqZmMjtF8a51J2Rb0xpVloFRnCODwqjpwnAoao4pelN8l3RJdv3gRQ==
@ -1708,7 +1634,7 @@ denque@^1.1.0:
resolved "https://registry.yarnpkg.com/denque/-/denque-1.4.1.tgz#6744ff7641c148c3f8a69c307e51235c1f4a37cf"
integrity sha512-OfzPuSZKGcgr96rf1oODnfjqBFmr1DVoc/TrItj3Ohe0Ah1C5WX5Baquw/9U9KovnQ88EqmJbD66rKYUQYN1tQ==
depd@~1.1.2:
depd@^1.1.2, depd@~1.1.2:
version "1.1.2"
resolved "https://registry.yarnpkg.com/depd/-/depd-1.1.2.tgz#9bcd52e14c097763e749b274c4346ed2e560b5a9"
integrity sha1-m81S4UwJd2PnSbJ0xDRu0uVgtak=
@ -2931,6 +2857,13 @@ https-proxy-agent@^3.0.0:
agent-base "^4.3.0"
debug "^3.1.0"
humanize-ms@^1.2.1:
version "1.2.1"
resolved "https://registry.yarnpkg.com/humanize-ms/-/humanize-ms-1.2.1.tgz#c46e3159a293f6b896da29316d8b6fe8bb79bbed"
integrity sha1-xG4xWaKT9riW2ikxbYtv6Lt5u+0=
dependencies:
ms "^2.0.0"
iconv-lite@0.4.24, iconv-lite@^0.4.24, iconv-lite@^0.4.4:
version "0.4.24"
resolved "https://registry.yarnpkg.com/iconv-lite/-/iconv-lite-0.4.24.tgz#2022b4b25fbddc21d2f524974a474aafe733908b"
@ -2943,11 +2876,6 @@ ieee754@1.1.13:
resolved "https://registry.yarnpkg.com/ieee754/-/ieee754-1.1.13.tgz#ec168558e95aa181fd87d37f55c32bbcb6708b84"
integrity sha512-4vf7I2LYV/HaWerSo3XmlMkp5eZ83i+/CDluXi/IGTs/O1sejBNhTtnxzmRZfvOUqj7lZjqHkeTvpgSFDlWZTg==
ieee754@1.1.8:
version "1.1.8"
resolved "https://registry.yarnpkg.com/ieee754/-/ieee754-1.1.8.tgz#be33d40ac10ef1926701f6f08a2d86fbfd1ad3e4"
integrity sha1-vjPUCsEO8ZJnAfbwii2G+/0a0+Q=
ieee754@^1.1.13, ieee754@^1.1.4:
version "1.2.1"
resolved "https://registry.yarnpkg.com/ieee754/-/ieee754-1.2.1.tgz#8eb7a10a63fff25d15a57b001586d177d1b0d352"
@ -4383,7 +4311,7 @@ ms@2.1.2:
resolved "https://registry.yarnpkg.com/ms/-/ms-2.1.2.tgz#d09d1f357b443f493382a8eb3ccd183872ae6009"
integrity sha512-sGkPx+VjMtmA6MX27oA4FBFELFCZZ4S4XqeGOXCv68tT+jb3vk/RyaKWP0PTKyWtmLSM0b+adUTEvbs1PEaH2w==
ms@^2.1.1:
ms@^2.0.0, ms@^2.1.1:
version "2.1.3"
resolved "https://registry.yarnpkg.com/ms/-/ms-2.1.3.tgz#574c8138ce1d2b5861f0b44579dbadd60c6615b2"
integrity sha512-6FlzubTLZG3J2a/NVCAleEhjzq5oxgHyaCU9yYXvcLsvoVaHJq/s5xXI6/XXP6tz7R9xAOtHnSO/tXtF3WRTlA==
@ -6781,11 +6709,6 @@ utils-merge@1.0.1:
resolved "https://registry.yarnpkg.com/utils-merge/-/utils-merge-1.0.1.tgz#9f95710f50a267947b2ccc124741c1028427e713"
integrity sha1-n5VxD1CiZ5R7LMwSR0HBAoQn5xM=
uuid@3.1.0:
version "3.1.0"
resolved "https://registry.yarnpkg.com/uuid/-/uuid-3.1.0.tgz#3dd3d3e790abc24d7b0d3a034ffababe28ebbc04"
integrity sha512-DIWtzUkw04M4k3bf1IcpS2tngXEL26YUD2M0tMDUpnUrz2hgzUBlD55a4FjdLGPvfHxS6uluGWvaVEqgBcVa+g==
uuid@3.3.2:
version "3.3.2"
resolved "https://registry.yarnpkg.com/uuid/-/uuid-3.3.2.tgz#1b4af4955eb3077c501c23872fc6513811587131"