Compare commits
1 Commits
developmen ... w/alex/upg

Author | SHA1 | Date
---|---|---
alexandre merle | 04983b2527 |
@@ -22,7 +22,7 @@
"@hapi/joi": "^17.1.0",
"arsenal": "github:scality/Arsenal#7358bd1",
"async": "~2.5.0",
- "aws-sdk": "2.363.0",
+ "aws-sdk": "2.831.0",
"azure-storage": "^2.1.0",
"bucketclient": "scality/bucketclient#6d2d5a4",
"commander": "^2.9.0",

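Note: the dependency bump from aws-sdk 2.363.0 to 2.831.0 is what the rest of this diff builds on. The AWS SDK v2 request object exposes a native `.promise()` method, and `setPromisesDependency()` lets the tests keep bluebird as the promise implementation, so the bluebird `promisifyAll` wrapper can be dropped. A minimal sketch of that setup (variable names here are illustrative, not taken from the diff):

    const AWS = require('aws-sdk');
    const bluebird = require('bluebird');
    // keep bluebird as the promise library used by the SDK
    AWS.config.setPromisesDependency(bluebird);
    const s3 = new AWS.S3();
    // every SDK call returns an AWS.Request; .promise() converts it to a promise
    s3.listBuckets().promise().then(data => console.log(data.Buckets));
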
@@ -1,4 +1,4 @@
- const Promise = require('bluebird');
+ const bluebird = require('bluebird');
const { S3 } = require('aws-sdk');
const projectFixture = require('../fixtures/project');
const getConfig = require('../../test/support/config');
@@ -7,12 +7,16 @@ class BucketUtility {
constructor(profile = 'default', config = {}) {
const s3Config = getConfig(profile, config);

- this.s3 = Promise.promisifyAll(new S3(s3Config), { suffix: 'Promise' });
+ this.s3 = new S3(s3Config);
+ this.s3.config.setPromisesDependency(bluebird);
+ this.s3.config.update({
+ maxRetries: 0,
+ });
}

createOne(bucketName) {
return this.s3
- .createBucketPromise({ Bucket: bucketName })
+ .createBucket({ Bucket: bucketName }).promise()
.then(() => bucketName);
}

@@ -40,7 +44,7 @@ class BucketUtility {

deleteOne(bucketName) {
return this.s3
- .deleteBucketPromise({ Bucket: bucketName });
+ .deleteBucket({ Bucket: bucketName }).promise();
}

deleteMany(bucketNames) {
@@ -63,39 +67,39 @@
};

return this.s3
- .listObjectVersionsPromise(param)
+ .listObjectVersions(param).promise()
.then(data =>
Promise.all(
data.Versions
.filter(object => !object.Key.endsWith('/'))
// remove all objects
.map(object =>
- this.s3.deleteObjectPromise({
+ this.s3.deleteObject({
Bucket: bucketName,
Key: object.Key,
VersionId: object.VersionId,
- })
+ }).promise()
.then(() => object)
)
.concat(data.Versions
.filter(object => object.Key.endsWith('/'))
// remove all directories
.map(object =>
- this.s3.deleteObjectPromise({
+ this.s3.deleteObject({
Bucket: bucketName,
Key: object.Key,
VersionId: object.VersionId,
- })
+ }).promise()
.then(() => object)
)
)
.concat(data.DeleteMarkers
.map(object =>
- this.s3.deleteObjectPromise({
+ this.s3.deleteObject({
Bucket: bucketName,
Key: object.Key,
VersionId: object.VersionId,
- })
+ }).promise()
.then(() => object)))
)
);
@@ -103,7 +107,7 @@ class BucketUtility {

getOwner() {
return this.s3
- .listBucketsPromise()
+ .listBuckets().promise()
.then(data => data.Owner);
}
}

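The same mechanical rewrite is applied in every test file below: each bluebird-suffixed helper is replaced by the plain SDK call with `.promise()` chained on the returned request. A sketch of the pattern (not copied verbatim from any one hunk):

    // before: method added by Promise.promisifyAll with the 'Promise' suffix
    s3.createBucketPromise({ Bucket: bucketName });
    // after: native AWS.Request#promise()
    s3.createBucket({ Bucket: bucketName }).promise();
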
@@ -37,7 +37,7 @@ describe('DELETE bucket cors', () => {
});

describe('with existing bucket', () => {
- beforeEach(() => s3.createBucketPromise({ Bucket: bucketName }));
+ beforeEach(() => s3.createBucket({ Bucket: bucketName }).promise());
afterEach(() => bucketUtil.deleteOne(bucketName));

describe('without existing cors configuration', () => {

@@ -25,7 +25,7 @@ describe('DELETE bucket website', () => {
});

describe('with existing bucket', () => {
- beforeEach(() => s3.createBucketPromise({ Bucket: bucketName }));
+ beforeEach(() => s3.createBucket({ Bucket: bucketName }).promise());
afterEach(() => bucketUtil.deleteOne(bucketName));

describe('without existing configuration', () => {

@@ -348,9 +348,9 @@ describe('GET Bucket - AWS.S3.listObjects', () => {

Promise
.mapSeries(test.objectPutParams(Bucket),
- param => s3.putObjectPromise(param))
+ param => s3.putObject(param).promise())
.then(() =>
- s3.listObjectsPromise(test.listObjectParams(Bucket)))
+ s3.listObjects(test.listObjectParams(Bucket)).promise())
.then(data => {
const isValidResponse =
tv4.validate(data, bucketSchema);
@@ -373,9 +373,10 @@ describe('GET Bucket - AWS.S3.listObjects', () => {

Promise
.mapSeries(test.objectPutParams(Bucket),
- param => s3.putObjectPromise(param))
+ param => s3.putObject(param).promise())
.then(() =>
- s3.listObjectsV2Promise(test.listObjectParams(Bucket)))
+ s3.listObjectsV2(test.listObjectParams(Bucket))
+ .promise())
.then(data => {
const isValidResponse =
tv4.validate(data, bucketSchemaV2);
@@ -398,8 +399,8 @@ describe('GET Bucket - AWS.S3.listObjects', () => {
const objects = [{ Bucket, Key: k }];

Promise
- .mapSeries(objects, param => s3.putObjectPromise(param))
- .then(() => s3.listObjectsPromise({ Bucket, Prefix: k }))
+ .mapSeries(objects, param => s3.putObject(param).promise())
+ .then(() => s3.listObjects({ Bucket, Prefix: k }).promise())
.then(data => {
const isValidResponse = tv4.validate(data,
bucketSchema);
@@ -422,8 +423,8 @@ describe('GET Bucket - AWS.S3.listObjects', () => {
const objects = [{ Bucket, Key: k }];

Promise
- .mapSeries(objects, param => s3.putObjectPromise(param))
- .then(() => s3.listObjectsPromise({ Bucket, Marker: k }))
+ .mapSeries(objects, param => s3.putObject(param).promise())
+ .then(() => s3.listObjects({ Bucket, Marker: k }).promise())
.then(data => {
const isValidResponse = tv4.validate(data,
bucketSchema);
@@ -446,9 +447,9 @@ describe('GET Bucket - AWS.S3.listObjects', () => {
const objects = [{ Bucket, Key: k }, { Bucket, Key: 'zzz' }];

Promise
- .mapSeries(objects, param => s3.putObjectPromise(param))
- .then(() => s3.listObjectsPromise({ Bucket, MaxKeys: 1,
- Delimiter: 'foo' }))
+ .mapSeries(objects, param => s3.putObject(param).promise())
+ .then(() => s3.listObjects({ Bucket, MaxKeys: 1,
+ Delimiter: 'foo' }).promise())
.then(data => {
const isValidResponse = tv4.validate(data,
bucketSchema);
@@ -471,9 +472,9 @@ describe('GET Bucket - AWS.S3.listObjects', () => {
const objects = [{ Bucket, Key: k }];

Promise
- .mapSeries(objects, param => s3.putObjectPromise(param))
- .then(() => s3.listObjectsV2Promise(
- { Bucket, StartAfter: k }))
+ .mapSeries(objects, param => s3.putObject(param).promise())
+ .then(() => s3.listObjectsV2(
+ { Bucket, StartAfter: k }).promise())
.then(data => {
const isValidResponse = tv4.validate(data,
bucketSchemaV2);
@@ -497,9 +498,11 @@ describe('GET Bucket - AWS.S3.listObjects', () => {
const objects = [{ Bucket, Key: k }];

Promise
- .mapSeries(objects, param => s3.putObjectPromise(param))
- .then(() => s3.listObjectsV2Promise(
- { Bucket, ContinuationToken: generateToken(k) }))
+ .mapSeries(objects, param => s3.putObject(param).promise())
+ .then(() => s3.listObjectsV2({
+ Bucket,
+ ContinuationToken: generateToken(k),
+ }).promise())
.then(data => {
const isValidResponse = tv4.validate(data,
bucketSchemaV2);
@@ -523,9 +526,9 @@ describe('GET Bucket - AWS.S3.listObjects', () => {
const Bucket = bucketName;
const objects = [{ Bucket, Key: k }, { Bucket, Key: 'zzz' }];
Promise
- .mapSeries(objects, param => s3.putObjectPromise(param))
- .then(() => s3.listObjectsV2Promise({ Bucket, MaxKeys: 1,
- Delimiter: 'foo' }))
+ .mapSeries(objects, param => s3.putObject(param).promise())
+ .then(() => s3.listObjectsV2({ Bucket, MaxKeys: 1,
+ Delimiter: 'foo' }).promise())
.then(data => {
const isValidResponse = tv4.validate(data,
bucketSchemaV2);

@@ -26,11 +26,11 @@ describe('GET bucket cors', () => {
MaxAgeSeconds: 3000 },
] };
before(() =>
- s3.createBucketPromise({ Bucket: bucketName })
- .then(() => s3.putBucketCorsPromise({
+ s3.createBucket({ Bucket: bucketName }).promise()
+ .then(() => s3.putBucketCors({
Bucket: bucketName,
CORSConfiguration: sampleCors,
- })));
+ }).promise()));

it('should return cors configuration successfully', done => {
s3.getBucketCors({ Bucket: bucketName }, (err, data) => {
@@ -51,11 +51,11 @@ describe('GET bucket cors', () => {
AllowedHeaders: [testValue] },
] };
before(() =>
- s3.createBucketPromise({ Bucket: bucketName })
- .then(() => s3.putBucketCorsPromise({
+ s3.createBucket({ Bucket: bucketName }).promise()
+ .then(() => s3.putBucketCors({
Bucket: bucketName,
CORSConfiguration: sampleCors,
- })));
+ }).promise()));

it('should be preserved when putting / getting cors resource',
done => {
@@ -75,11 +75,11 @@ describe('GET bucket cors', () => {
AllowedOrigins: ['http://www.example.com'] },
] };
before(() =>
- s3.createBucketPromise({ Bucket: bucketName })
- .then(() => s3.putBucketCorsPromise({
+ s3.createBucket({ Bucket: bucketName }).promise()
+ .then(() => s3.putBucketCors({
Bucket: bucketName,
CORSConfiguration: sampleCors,
- })));
+ }).promise()));

it('should be preserved when retrieving cors resource',
done => {

@@ -23,13 +23,13 @@ describeSkipAWS('GET bucket location ', () => {
return;
}
describe(`with location: ${location}`, () => {
- before(done => s3.createBucketPromise(
+ before(() => s3.createBucket(
{
Bucket: bucketName,
CreateBucketConfiguration: {
LocationConstraint: location,
},
- }, done));
+ }).promise());
after(() => bucketUtil.deleteOne(bucketName));

it(`should return location configuration: ${location} ` +
@@ -48,13 +48,13 @@ describeSkipAWS('GET bucket location ', () => {
});

describe('with location us-east-1', () => {
- before(done => s3.createBucketPromise(
+ before(() => s3.createBucket(
{
Bucket: bucketName,
CreateBucketConfiguration: {
LocationConstraint: 'us-east-1',
},
- }, done));
+ }).promise());
afterEach(() => bucketUtil.deleteOne(bucketName));
it('should return empty location',
done => {
@@ -105,13 +105,13 @@ describeSkipAWS('GET bucket location ', () => {
});

describe('with location configuration', () => {
- before(done => s3.createBucketPromise(
+ before(() => s3.createBucket(
{
Bucket: bucketName,
CreateBucketConfiguration: {
LocationConstraint: 'us-east-1',
},
- }, done));
+ }).promise());
after(() => bucketUtil.deleteOne(bucketName));

it('should return AccessDenied if user is not bucket owner',

@@ -31,11 +31,11 @@ describe('GET bucket website', () => {

describe('with existing bucket configuration', () => {
before(() =>
- s3.createBucketPromise({ Bucket: bucketName })
- .then(() => s3.putBucketWebsitePromise({
+ s3.createBucket({ Bucket: bucketName }).promise()
+ .then(() => s3.putBucketWebsite({
Bucket: bucketName,
WebsiteConfiguration: config,
- })));
+ }).promise()));

it('should return bucket website xml successfully', done => {
s3.getBucketWebsite({ Bucket: bucketName }, (err, data) => {

@@ -198,7 +198,7 @@ describe('PUT Bucket - AWS.S3.createBucket', () => {
() => {
after(() => bucketUtil.deleteOne(bucketName));
it(`should create bucket with location: ${location}`, done => {
- bucketUtil.s3.createBucketPromise(
+ bucketUtil.s3.createBucket(
{
Bucket: bucketName,
CreateBucketConfiguration: {
@@ -211,7 +211,7 @@ describe('PUT Bucket - AWS.S3.createBucket', () => {

describe('bucket creation with invalid location', () => {
it('should return errors InvalidLocationConstraint', done => {
- bucketUtil.s3.createBucketPromise(
+ bucketUtil.s3.createBucket(
{
Bucket: bucketName,
CreateBucketConfiguration: {

@@ -1,3 +1,4 @@
--recursive
+ --timeout 40000
--ui tdd
--bail

@@ -149,11 +149,11 @@ function testSuite() {
process.stdout.write('Creating bucket');
bucketUtil = new BucketUtility('default', sigCfg);
s3 = bucketUtil.s3;
- return s3.createBucketPromise({ Bucket: bucket,
+ return s3.createBucket({ Bucket: bucket,
CreateBucketConfiguration: {
LocationConstraint: awsLocation,
},
- })
+ }).promise()
.catch(err => {
process.stdout.write(`Error creating bucket: ${err}\n`);
throw err;

@@ -38,39 +38,39 @@ describeSkipIfNotMultiple('Multiple backend delete', () => {
process.stdout.write('Putting object to mem\n');
const params = { Bucket: bucket, Key: memObject, Body: body,
Metadata: { 'scal-location-constraint': memLocation } };
- return s3.putObjectPromise(params);
+ return s3.putObject(params).promise();
})
.then(() => {
process.stdout.write('Putting object to file\n');
const params = { Bucket: bucket, Key: fileObject, Body: body,
Metadata: { 'scal-location-constraint': fileLocation } };
- return s3.putObjectPromise(params);
+ return s3.putObject(params).promise();
})
.then(() => {
process.stdout.write('Putting object to AWS\n');
const params = { Bucket: bucket, Key: awsObject, Body: body,
Metadata: { 'scal-location-constraint': awsLocation } };
- return s3.putObjectPromise(params);
+ return s3.putObject(params).promise();
})
.then(() => {
process.stdout.write('Putting 0-byte object to AWS\n');
const params = { Bucket: bucket, Key: emptyObject,
Metadata: { 'scal-location-constraint': awsLocation } };
- return s3.putObjectPromise(params);
+ return s3.putObject(params).promise();
})
.then(() => {
process.stdout.write('Putting large object to AWS\n');
const params = { Bucket: bucket, Key: bigObject,
Body: bigBody,
Metadata: { 'scal-location-constraint': awsLocation } };
- return s3.putObjectPromise(params);
+ return s3.putObject(params).promise();
})
.then(() => {
process.stdout.write('Putting object to AWS\n');
const params = { Bucket: bucket, Key: mismatchObject,
Body: body, Metadata:
{ 'scal-location-constraint': awsLocationMismatch } };
- return s3.putObjectPromise(params);
+ return s3.putObject(params).promise();
})
.catch(err => {
process.stdout.write(`Error putting objects: ${err}\n`);
@@ -119,7 +119,7 @@ describeSkipIfNotMultiple('AWS backend delete object w. versioning: ' +
process.stdout.write('Creating bucket\n');
bucketUtil = new BucketUtility('default', sigCfg);
s3 = bucketUtil.s3;
- return s3.createBucketPromise({ Bucket: bucket })
+ return s3.createBucket({ Bucket: bucket }).promise()
.catch(err => {
process.stdout.write(`Error creating bucket: ${err}\n`);
throw err;
@@ -493,7 +493,7 @@ describeSkipIfNotMultiple('AWS backend delete object w. versioning: ' +
process.stdout.write('Creating bucket\n');
bucketUtil = new BucketUtility('default', sigCfg);
s3 = bucketUtil.s3;
- return s3.createBucketPromise(createBucketParams)
+ return s3.createBucket(createBucketParams).promise()
.catch(err => {
process.stdout.write(`Error creating bucket: ${err}\n`);
throw err;

@@ -36,7 +36,7 @@ function testSuite() {
process.stdout.write('Creating bucket');
bucketUtil = new BucketUtility('default', sigCfg);
s3 = bucketUtil.s3;
- return s3.createBucketPromise({ Bucket: azureContainerName })
+ return s3.createBucket({ Bucket: azureContainerName }).promise()
.catch(err => {
process.stdout.write(`Error creating bucket: ${err}\n`);
throw err;

@@ -34,7 +34,7 @@ describe('Multiple backend get object', function testSuite() {
process.stdout.write('Creating bucket');
bucketUtil = new BucketUtility('default', sigCfg);
s3 = bucketUtil.s3;
- return s3.createBucketPromise({ Bucket: bucket })
+ return s3.createBucket({ Bucket: bucket }).promise()
.catch(err => {
process.stdout.write(`Error creating bucket: ${err}\n`);
throw err;
@@ -183,46 +183,50 @@ describe('Multiple backend get object', function testSuite() {
'(mem/file/AWS)', () => {
before(() => {
process.stdout.write('Putting object to mem\n');
- return s3.putObjectPromise({ Bucket: bucket, Key: memObject,
+ return s3.putObject({ Bucket: bucket, Key: memObject,
Body: body,
- Metadata: { 'scal-location-constraint': memLocation } })
+ Metadata: { 'scal-location-constraint': memLocation },
+ }).promise()
.then(() => {
process.stdout.write('Putting object to file\n');
- return s3.putObjectPromise({ Bucket: bucket,
+ return s3.putObject({ Bucket: bucket,
Key: fileObject,
Body: body,
Metadata:
{ 'scal-location-constraint': fileLocation },
- });
+ }).promise();
})
.then(() => {
process.stdout.write('Putting object to AWS\n');
- return s3.putObjectPromise({ Bucket: bucket, Key: awsObject,
+ return s3.putObject({ Bucket: bucket, Key: awsObject,
Body: body,
Metadata: {
- 'scal-location-constraint': awsLocation } });
+ 'scal-location-constraint': awsLocation },
+ }).promise();
})
.then(() => {
process.stdout.write('Putting 0-byte object to mem\n');
- return s3.putObjectPromise({ Bucket: bucket,
+ return s3.putObject({ Bucket: bucket,
Key: emptyObject,
Metadata:
{ 'scal-location-constraint': memLocation },
- });
+ }).promise();
})
.then(() => {
process.stdout.write('Putting 0-byte object to AWS\n');
- return s3.putObjectPromise({ Bucket: bucket,
+ return s3.putObject({ Bucket: bucket,
Key: emptyAwsObject,
Metadata: {
- 'scal-location-constraint': awsLocation } });
+ 'scal-location-constraint': awsLocation },
+ }).promise();
})
.then(() => {
process.stdout.write('Putting large object to AWS\n');
- return s3.putObjectPromise({ Bucket: bucket,
+ return s3.putObject({ Bucket: bucket,
Key: bigObject, Body: bigBody,
Metadata: {
- 'scal-location-constraint': awsLocation } });
+ 'scal-location-constraint': awsLocation },
+ }).promise();
})
.catch(err => {
process.stdout.write(`Error putting objects: ${err}\n`);

@@ -46,7 +46,7 @@ function testSuite() {
process.stdout.write('Creating bucket');
bucketUtil = new BucketUtility('default', sigCfg);
s3 = bucketUtil.s3;
- return s3.createBucketPromise({ Bucket: bucket })
+ return s3.createBucket({ Bucket: bucket }).promise()
.catch(err => {
process.stdout.write(`Error creating bucket: ${err}\n`);
throw err;

@@ -32,7 +32,7 @@ function testSuite() {
process.stdout.write('Creating bucket');
bucketUtil = new BucketUtility('default', sigCfg);
s3 = bucketUtil.s3;
- return s3.createBucketPromise({ Bucket: azureContainerName })
+ return s3.createBucket({ Bucket: azureContainerName }).promise()
.catch(err => {
process.stdout.write(`Error creating bucket: ${err}\n`);
throw err;

@@ -20,21 +20,23 @@ describeSkipIfNotMultiple('List parts of MPU on Azure data backend', () => {
this.currentTest.key = `somekey-${Date.now()}`;
bucketUtil = new BucketUtility('default', sigCfg);
s3 = bucketUtil.s3;
- return s3.createBucketPromise({ Bucket: azureContainerName })
- .then(() => s3.createMultipartUploadPromise({
+ return s3.createBucket({ Bucket: azureContainerName }).promise()
+ .then(() => s3.createMultipartUpload({
Bucket: azureContainerName, Key: this.currentTest.key,
- Metadata: { 'scal-location-constraint': azureLocation } }))
+ Metadata: { 'scal-location-constraint': azureLocation },
+ }).promise())
.then(res => {
this.currentTest.uploadId = res.UploadId;
- return s3.uploadPartPromise({ Bucket: azureContainerName,
+ return s3.uploadPart({ Bucket: azureContainerName,
Key: this.currentTest.key, PartNumber: 1,
- UploadId: this.currentTest.uploadId, Body: bodyFirstPart });
+ UploadId: this.currentTest.uploadId, Body: bodyFirstPart,
+ }).promise();
}).then(res => {
this.currentTest.firstEtag = res.ETag;
- }).then(() => s3.uploadPartPromise({ Bucket: azureContainerName,
+ }).then(() => s3.uploadPart({ Bucket: azureContainerName,
Key: this.currentTest.key, PartNumber: 2,
- UploadId: this.currentTest.uploadId, Body: bodySecondPart })
- ).then(res => {
+ UploadId: this.currentTest.uploadId, Body: bodySecondPart,
+ }).promise()).then(res => {
this.currentTest.secondEtag = res.ETag;
})
.catch(err => {
@@ -45,10 +47,10 @@ describeSkipIfNotMultiple('List parts of MPU on Azure data backend', () => {

afterEach(function afterEachFn() {
process.stdout.write('Emptying bucket');
- return s3.abortMultipartUploadPromise({
+ return s3.abortMultipartUpload({
Bucket: azureContainerName, Key: this.currentTest.key,
UploadId: this.currentTest.uploadId,
- })
+ }).promise()
.then(() => bucketUtil.empty(azureContainerName))
.then(() => {
process.stdout.write('Deleting bucket');

@@ -109,7 +109,7 @@ function testSuite() {
bucketUtil = new BucketUtility('default', sigCfg);
s3 = bucketUtil.s3;
this.currentTest.awsClient = awsS3;
- return s3.createBucketPromise({ Bucket: azureContainerName })
+ return s3.createBucket({ Bucket: azureContainerName }).promise()
.catch(err => {
process.stdout.write(`Error creating bucket: ${err}\n`);
throw err;

@@ -121,6 +121,7 @@ function testSuite() {
bucketUtil = new BucketUtility('default', sigCfg);
s3 = bucketUtil.s3;
process.stdout.write('Creating bucket\n');
+ s3.createBucketPromise = Promise.promisify(s3.createBucket);
if (process.env.ENABLE_KMS_ENCRYPTION === 'true') {
s3.createBucketPromise = createEncryptedBucketPromise;
}

@@ -113,6 +113,7 @@ function testSuite() {
bucketUtil = new BucketUtility('default', sigCfg);
s3 = bucketUtil.s3;
process.stdout.write('Creating bucket\n');
+ s3.createBucketPromise = Promise.promisify(s3.createBucket);
if (process.env.ENABLE_KMS_ENCRYPTION === 'true') {
s3.createBucketPromise = createEncryptedBucketPromise;
}

@@ -167,7 +167,7 @@ function testSuite() {
beforeEach(() => {
bucketUtil = new BucketUtility('default', sigCfg);
s3 = bucketUtil.s3;
- return s3.createBucketPromise({ Bucket: bucket })
+ return s3.createBucket({ Bucket: bucket }).promise()
.catch(err => {
process.stdout.write(`Error creating bucket: ${err}\n`);
throw err;

@@ -79,6 +79,7 @@ describe('MultipleBackend put object', function testSuite() {
bucketUtil = new BucketUtility('default', sigCfg);
s3 = bucketUtil.s3;
process.stdout.write('Creating bucket\n');
+ s3.createBucketPromise = Promise.promisify(s3.createBucket);
if (process.env.ENABLE_KMS_ENCRYPTION === 'true') {
s3.createBucketPromise = createEncryptedBucketPromise;
}

@@ -108,7 +108,7 @@ describeF() {

describe('with no bucket location header', () => {
beforeEach(() =>
- s3.createBucketPromise({ Bucket: azureContainerName })
+ s3.createBucket({ Bucket: azureContainerName }).promise()
.catch(err => {
process.stdout.write(`Error creating bucket: ${err}\n`);
throw err;

@@ -130,7 +130,7 @@ describeSkipIfE2E('PUT public object with 100-continue header', () => {
const signedUrl = s3.getSignedUrl('putObject', params);
const { path } = url.parse(signedUrl);
continueRequest = new ContinueRequestHandler(path);
- return s3.createBucketPromise({ Bucket: bucket });
+ return s3.createBucket({ Bucket: bucket }).promise();
});

afterEach(() =>

@@ -23,13 +23,14 @@ describe('Abort MPU', () => {
beforeEach(() => {
bucketUtil = new BucketUtility('default', sigCfg);
s3 = bucketUtil.s3;
- return s3.createBucketPromise({ Bucket: bucket })
- .then(() => s3.createMultipartUploadPromise({
- Bucket: bucket, Key: key }))
+ return s3.createBucket({ Bucket: bucket }).promise()
+ .then(() => s3.createMultipartUpload({
+ Bucket: bucket, Key: key }).promise())
.then(res => {
uploadId = res.UploadId;
- return s3.uploadPartPromise({ Bucket: bucket, Key: key,
- PartNumber: 1, UploadId: uploadId, Body: bodyFirstPart });
+ return s3.uploadPart({ Bucket: bucket, Key: key,
+ PartNumber: 1, UploadId: uploadId, Body: bodyFirstPart,
+ }).promise();
})
.catch(err => {
process.stdout.write(`Error in beforeEach: ${err}\n`);
@@ -38,11 +39,11 @@ describe('Abort MPU', () => {
});

afterEach(() =>
- s3.abortMultipartUploadPromise({
+ s3.abortMultipartUpload({
Bucket: bucket,
Key: key,
UploadId: uploadId,
- })
+ }).promise()
.then(() => bucketUtil.empty(bucket))
.then(() => bucketUtil.deleteOne(bucket))
);
@@ -71,7 +72,7 @@ describe('Abort MPU - No Such Upload', () => {
beforeEach(() => {
bucketUtil = new BucketUtility('default', sigCfg);
s3 = bucketUtil.s3;
- return s3.createBucketPromise({ Bucket: bucket });
+ return s3.createBucket({ Bucket: bucket }).promise();
});

afterEach(() => bucketUtil.deleteOne(bucket));

@@ -56,12 +56,17 @@ describe('Complete MPU', () => {

function _initiateMpuAndPutOnePart() {
const result = {};
- return s3.createMultipartUploadPromise({
- Bucket: bucket, Key: key })
+ return s3.createMultipartUpload({
+ Bucket: bucket, Key: key }).promise()
.then(data => {
result.uploadId = data.UploadId;
- return s3.uploadPartPromise({ Bucket: bucket, Key: key,
- PartNumber: 1, UploadId: data.UploadId, Body: 'foo' });
+ return s3.uploadPart({
+ Bucket: bucket,
+ Key: key,
+ PartNumber: 1,
+ UploadId: data.UploadId,
+ Body: 'foo',
+ }).promise();
})
.then(data => {
result.eTag = data.ETag;
@@ -107,8 +112,8 @@ describe('Complete MPU', () => {
let uploadId;
let eTag;

- beforeEach(() => s3.putBucketVersioningPromise({ Bucket: bucket,
- VersioningConfiguration: versioningEnabled })
+ beforeEach(() => s3.putBucketVersioning({ Bucket: bucket,
+ VersioningConfiguration: versioningEnabled }).promise()
.then(() => _initiateMpuAndPutOnePart())
.then(result => {
uploadId = result.uploadId;
@@ -126,8 +131,8 @@ describe('Complete MPU', () => {
let uploadId;
let eTag;

- beforeEach(() => s3.putBucketVersioningPromise({ Bucket: bucket,
- VersioningConfiguration: versioningSuspended })
+ beforeEach(() => s3.putBucketVersioning({ Bucket: bucket,
+ VersioningConfiguration: versioningSuspended }).promise()
.then(() => _initiateMpuAndPutOnePart())
.then(result => {
uploadId = result.uploadId;

@@ -40,6 +40,7 @@ describe('Object Part Copy', () => {
beforeEach(() => {
bucketUtil = new BucketUtility('default', sigCfg);
s3 = bucketUtil.s3;
+ s3.createBucketPromise = Promise.promisify(s3.createBucket);
if (process.env.ENABLE_KMS_ENCRYPTION === 'true') {
s3.createBucketPromise = createEncryptedBucketPromise;
}
@@ -54,22 +55,22 @@ describe('Object Part Copy', () => {
throw err;
})
.then(() =>
- s3.putObjectPromise({
+ s3.putObject({
Bucket: sourceBucketName,
Key: sourceObjName,
Body: content,
- }))
+ }).promise())
.then(res => {
etag = res.ETag;
- return s3.headObjectPromise({
+ return s3.headObject({
Bucket: sourceBucketName,
Key: sourceObjName,
- });
+ }).promise();
}).then(() =>
- s3.createMultipartUploadPromise({
+ s3.createMultipartUpload({
Bucket: destBucketName,
Key: destObjName,
- })).then(iniateRes => {
+ }).promise()).then(iniateRes => {
uploadId = iniateRes.UploadId;
}).catch(err => {
process.stdout.write(`Error in outer beforeEach: ${err}\n`);
@@ -79,11 +80,11 @@ describe('Object Part Copy', () => {

afterEach(() => bucketUtil.empty(sourceBucketName)
.then(() => bucketUtil.empty(destBucketName))
- .then(() => s3.abortMultipartUploadPromise({
+ .then(() => s3.abortMultipartUpload({
Bucket: destBucketName,
Key: destObjName,
UploadId: uploadId,
- }))
+ }).promise())
.catch(err => {
if (err.code !== 'NoSuchUpload') {
process.stdout.write(`Error in afterEach: ${err}\n`);
@@ -335,10 +336,10 @@ describe('Object Part Copy', () => {
const otherPartBuff = Buffer.alloc(5242880, 1);
otherMd5HashPart.update(otherPartBuff);
const otherPartHash = otherMd5HashPart.digest('hex');
- return s3.createMultipartUploadPromise({
+ return s3.createMultipartUpload({
Bucket: sourceBucketName,
Key: sourceMpuKey,
- }).then(iniateRes => {
+ }).promise().then(iniateRes => {
sourceMpuId = iniateRes.UploadId;
}).catch(err => {
process.stdout.write(`Error initiating MPU ' +
@@ -349,13 +350,13 @@ describe('Object Part Copy', () => {
for (let i = 1; i < 10; i++) {
const partBuffHere = i % 2 ? partBuff : otherPartBuff;
const partHashHere = i % 2 ? partHash : otherPartHash;
- partUploads.push(s3.uploadPartPromise({
+ partUploads.push(s3.uploadPart({
Bucket: sourceBucketName,
Key: sourceMpuKey,
PartNumber: i,
UploadId: sourceMpuId,
Body: partBuffHere,
- }));
+ }).promise());
parts.push({
ETag: partHashHere,
PartNumber: i,
@@ -369,14 +370,14 @@ describe('Object Part Copy', () => {
throw err;
}).then(() => {
process.stdout.write('completing mpu');
- return s3.completeMultipartUploadPromise({
+ return s3.completeMultipartUpload({
Bucket: sourceBucketName,
Key: sourceMpuKey,
UploadId: sourceMpuId,
MultipartUpload: {
Parts: parts,
},
- });
+ }).promise();
}).then(() => {
process.stdout.write('finished completing mpu');
}).catch(err => {
@@ -385,11 +386,11 @@ describe('Object Part Copy', () => {
});
});

- afterEach(() => s3.abortMultipartUploadPromise({
+ afterEach(() => s3.abortMultipartUpload({
Bucket: sourceBucketName,
Key: sourceMpuKey,
UploadId: sourceMpuId,
- }).catch(err => {
+ }).promise().catch(err => {
if (err.code !== 'NoSuchUpload'
&& err.code !== 'NoSuchBucket') {
process.stdout.write(`Error in afterEach: ${err}\n`);
@@ -418,27 +419,27 @@ describe('Object Part Copy', () => {
it('should copy two parts from a source bucket to a different ' +
'destination bucket and complete the MPU', () => {
process.stdout.write('Putting first part in MPU test');
- return s3.uploadPartCopyPromise({ Bucket: destBucketName,
+ return s3.uploadPartCopy({ Bucket: destBucketName,
Key: destObjName,
CopySource: `${sourceBucketName}/${sourceMpuKey}`,
PartNumber: 1,
UploadId: uploadId,
- }).then(res => {
+ }).promise().then(res => {
assert.strictEqual(res.ETag, totalMpuObjectHash);
assert(res.LastModified);
}).then(() => {
process.stdout.write('Putting second part in MPU test');
- return s3.uploadPartCopyPromise({ Bucket: destBucketName,
+ return s3.uploadPartCopy({ Bucket: destBucketName,
Key: destObjName,
CopySource: `${sourceBucketName}/${sourceMpuKey}`,
PartNumber: 2,
UploadId: uploadId,
- }).then(res => {
+ }).promise().then(res => {
assert.strictEqual(res.ETag, totalMpuObjectHash);
assert(res.LastModified);
}).then(() => {
process.stdout.write('Completing MPU');
- return s3.completeMultipartUploadPromise({
+ return s3.completeMultipartUpload({
Bucket: destBucketName,
Key: destObjName,
UploadId: uploadId,
@@ -448,7 +449,7 @@ describe('Object Part Copy', () => {
{ ETag: totalMpuObjectHash, PartNumber: 2 },
],
},
- });
+ }).promise();
}).then(res => {
assert.strictEqual(res.Bucket, destBucketName);
assert.strictEqual(res.Key, destObjName);
@@ -472,29 +473,29 @@ describe('Object Part Copy', () => {
// with number of parts at the end)
const finalCombinedETag =
'"e08ede4e8b942e18537cb2289f613ae3-2"';
- return s3.uploadPartCopyPromise({ Bucket: destBucketName,
+ return s3.uploadPartCopy({ Bucket: destBucketName,
Key: destObjName,
CopySource: `${sourceBucketName}/${sourceMpuKey}`,
PartNumber: 1,
UploadId: uploadId,
CopySourceRange: 'bytes=5242890-15242880',
- }).then(res => {
+ }).promise().then(res => {
assert.strictEqual(res.ETag, part1ETag);
assert(res.LastModified);
}).then(() => {
process.stdout.write('Putting second part in MPU test');
- return s3.uploadPartCopyPromise({ Bucket: destBucketName,
+ return s3.uploadPartCopy({ Bucket: destBucketName,
Key: destObjName,
CopySource: `${sourceBucketName}/${sourceMpuKey}`,
PartNumber: 2,
UploadId: uploadId,
CopySourceRange: 'bytes=15242891-30242991',
- }).then(res => {
+ }).promise().then(res => {
assert.strictEqual(res.ETag, part2ETag);
assert(res.LastModified);
}).then(() => {
process.stdout.write('Completing MPU');
- return s3.completeMultipartUploadPromise({
+ return s3.completeMultipartUpload({
Bucket: destBucketName,
Key: destObjName,
UploadId: uploadId,
@@ -504,17 +505,17 @@ describe('Object Part Copy', () => {
{ ETag: part2ETag, PartNumber: 2 },
],
},
- });
+ }).promise();
}).then(res => {
assert.strictEqual(res.Bucket, destBucketName);
assert.strictEqual(res.Key, destObjName);
assert.strictEqual(res.ETag, finalCombinedETag);
}).then(() => {
process.stdout.write('Getting new object');
- return s3.getObjectPromise({
+ return s3.getObject({
Bucket: destBucketName,
Key: destObjName,
- });
+ }).promise();
}).then(res => {
assert.strictEqual(res.ContentLength, 25000092);
assert.strictEqual(res.ETag, finalCombinedETag);
@@ -529,27 +530,27 @@ describe('Object Part Copy', () => {
// AWS response etag for this completed MPU
const finalObjETag = '"db77ebbae9e9f5a244a26b86193ad818-1"';
process.stdout.write('Putting first part in MPU test');
- return s3.uploadPartCopyPromise({ Bucket: destBucketName,
+ return s3.uploadPartCopy({ Bucket: destBucketName,
Key: destObjName,
CopySource: `${sourceBucketName}/${sourceMpuKey}`,
PartNumber: 1,
UploadId: uploadId,
- }).then(res => {
+ }).promise().then(res => {
assert.strictEqual(res.ETag, totalMpuObjectHash);
assert(res.LastModified);
}).then(() => {
process.stdout.write('Overwriting first part in MPU test');
- return s3.uploadPartCopyPromise({ Bucket: destBucketName,
+ return s3.uploadPartCopy({ Bucket: destBucketName,
Key: destObjName,
CopySource: `${sourceBucketName}/${sourceObjName}`,
PartNumber: 1,
- UploadId: uploadId });
+ UploadId: uploadId }).promise();
}).then(res => {
assert.strictEqual(res.ETag, etag);
assert(res.LastModified);
}).then(() => {
process.stdout.write('Completing MPU');
- return s3.completeMultipartUploadPromise({
+ return s3.completeMultipartUpload({
Bucket: destBucketName,
Key: destObjName,
UploadId: uploadId,
@@ -558,7 +559,7 @@ describe('Object Part Copy', () => {
{ ETag: etag, PartNumber: 1 },
],
},
- });
+ }).promise();
}).then(res => {
assert.strictEqual(res.Bucket, destBucketName);
assert.strictEqual(res.Key, destObjName);
@@ -566,10 +567,10 @@ describe('Object Part Copy', () => {
}).then(() => {
process.stdout.write('Getting object put by MPU with ' +
'overwrite part');
- return s3.getObjectPromise({
+ return s3.getObject({
Bucket: destBucketName,
Key: destObjName,
- });
+ }).promise();
}).then(res => {
assert.strictEqual(res.ETag, finalObjETag);
}).catch(err => {
@@ -650,18 +651,18 @@ describe('Object Part Copy', () => {

beforeEach(() => {
process.stdout.write('In other account before each');
- return otherAccountS3.createBucketPromise({ Bucket:
- otherAccountBucket })
+ return otherAccountS3.createBucket({ Bucket:
+ otherAccountBucket }).promise()
.catch(err => {
process.stdout.write('Error creating other account ' +
`bucket: ${err}\n`);
throw err;
}).then(() => {
process.stdout.write('Initiating other account MPU');
- return otherAccountS3.createMultipartUploadPromise({
+ return otherAccountS3.createMultipartUpload({
Bucket: otherAccountBucket,
Key: otherAccountKey,
- });
+ }).promise();
}).then(iniateRes => {
otherAccountUploadId = iniateRes.UploadId;
}).catch(err => {
@@ -672,11 +673,11 @@ describe('Object Part Copy', () => {
});

afterEach(() => otherAccountBucketUtility.empty(otherAccountBucket)
- .then(() => otherAccountS3.abortMultipartUploadPromise({
+ .then(() => otherAccountS3.abortMultipartUpload({
Bucket: otherAccountBucket,
Key: otherAccountKey,
UploadId: otherAccountUploadId,
- }))
+ }).promise())
.catch(err => {
if (err.code !== 'NoSuchUpload') {
process.stdout.write('Error in other account ' +

@@ -54,10 +54,10 @@ describe('DELETE multipart', () => {
`${confLocation.name}`,
() => {
beforeEach(() =>
- s3.createBucketPromise({ Bucket: bucket,
+ s3.createBucket({ Bucket: bucket,
CreateBucketConfiguration: {
LocationConstraint: confLocation.location,
- } })
+ } }).promise()
.catch(err => {
process.stdout.write(`Error in beforeEach: ${err}\n`);
throw err;
@@ -89,10 +89,10 @@ describe('DELETE multipart', () => {
let uploadId;

beforeEach(() =>
- s3.createMultipartUploadPromise({
+ s3.createMultipartUpload({
Bucket: bucket,
Key: key,
- })
+ }).promise()
.then(res => {
uploadId = res.UploadId;
return s3.uploadPart({

@@ -1,5 +1,4 @@
const assert = require('assert');
- const Promise = require('bluebird');
const withV4 = require('../support/withV4');
const BucketUtility = require('../../lib/utility/bucket-util');

@@ -15,11 +14,13 @@ describe('DELETE object', () => {

before(() => {
process.stdout.write('creating bucket\n');
- return s3.createBucketPromise({ Bucket: bucketName })
+ return s3.createBucket({ Bucket: bucketName }).promise()
.then(() => {
process.stdout.write('initiating multipart upload\n');
- return s3.createMultipartUploadPromise({ Bucket: bucketName,
- Key: objectName });
+ return s3.createMultipartUpload({
+ Bucket: bucketName,
+ Key: objectName,
+ }).promise();
})
.then(res => {
process.stdout.write('uploading parts\n');
@@ -27,9 +28,10 @@ describe('DELETE object', () => {
const uploads = [];
for (let i = 1; i <= 3; i++) {
uploads.push(
- s3.uploadPartPromise({ Bucket: bucketName,
+ s3.uploadPart({ Bucket: bucketName,
Key: objectName, PartNumber: i, Body: testfile,
- UploadId: uploadId })
+ UploadId: uploadId,
+ }).promise()
);
}
return Promise.all(uploads);
@@ -40,7 +42,7 @@ describe('DELETE object', () => {
})
.then(res => {
process.stdout.write('about to complete multipart upload\n');
- return s3.completeMultipartUploadPromise({
+ return s3.completeMultipartUpload({
Bucket: bucketName,
Key: objectName,
UploadId: uploadId,
@@ -51,7 +53,7 @@ describe('DELETE object', () => {
{ ETag: res[2].ETag, PartNumber: 3 },
],
},
- });
+ }).promise();
})
.catch(err => {
process.stdout.write(`completeMultipartUpload error: ${err}\n`);

@@ -101,7 +101,6 @@ describe('GET object', () => {
UploadId: uploadId,
Body: Buffer.alloc(partSize).fill(partNumber),
};
-
return s3.uploadPart(uploadPartParams, (err, data) => {
checkNoError(err);
ETags = ETags.concat(data.ETag);
@@ -1045,3 +1044,4 @@ describe('GET object', () => {
});
});
});
+

@@ -43,11 +43,11 @@ describe('GET multipart upload object [Cache-Control, Content-Disposition, ' +
})
.then(() => {
process.stdout.write('creating bucket\n');
- return s3.createBucketPromise({ Bucket: bucketName });
+ return s3.createBucket({ Bucket: bucketName }).promise();
})
.then(() => {
process.stdout.write('initiating multipart upload\n');
- return s3.createMultipartUploadPromise(params);
+ return s3.createMultipartUpload(params).promise();
})
.then(res => {
uploadId = res.UploadId;
@@ -75,14 +75,14 @@ describe('GET multipart upload object [Cache-Control, Content-Disposition, ' +
() => {
const params = { Bucket: bucketName, Key: 'key', PartNumber: 1,
UploadId: uploadId };
- return s3.uploadPartPromise(params)
+ return s3.uploadPart(params).promise()
.catch(err => {
process.stdout.write(`Error in uploadPart ${err}\n`);
throw err;
})
.then(res => {
process.stdout.write('about to complete multipart upload\n');
- return s3.completeMultipartUploadPromise({
+ return s3.completeMultipartUpload({
Bucket: bucketName,
Key: objectName,
UploadId: uploadId,
@@ -91,7 +91,7 @@ describe('GET multipart upload object [Cache-Control, Content-Disposition, ' +
{ ETag: res.ETag, PartNumber: 1 },
],
},
- });
+ }).promise();
})
.catch(err => {
process.stdout.write(`Error completing upload ${err}\n`);
@@ -99,9 +99,9 @@ describe('GET multipart upload object [Cache-Control, Content-Disposition, ' +
})
.then(() => {
process.stdout.write('about to get object\n');
- return s3.getObjectPromise({
+ return s3.getObject({
Bucket: bucketName, Key: objectName,
- });
+ }).promise();
})
.catch(err => {
process.stdout.write(`Error getting object ${err}\n`);

@@ -28,17 +28,17 @@ describe('aws-node-sdk range test of large end position', () => {
beforeEach(() => {
bucketUtil = new BucketUtility('default', sigCfg);
s3 = bucketUtil.s3;
- return s3.createBucketPromise({ Bucket: bucketName })
+ return s3.createBucket({ Bucket: bucketName }).promise()
.catch(err => {
process.stdout.write(`Error creating bucket: ${err}\n`);
throw err;
}).then(() =>
- s3.putObjectPromise({
+ s3.putObject({
Bucket: bucketName,
Key: objName,
Body: Buffer.allocUnsafe(2890).fill(0, 0, 2800)
.fill(1, 2800),
- }))
+ }).promise())
.catch(err => {
process.stdout.write(`Error in beforeEach: ${err}\n`);
throw err;

@@ -18,7 +18,7 @@ describe('Initiate MPU', () => {
beforeEach(() => {
bucketUtil = new BucketUtility('default', sigCfg);
s3 = bucketUtil.s3;
- return s3.createBucketPromise({ Bucket: bucket })
+ return s3.createBucket({ Bucket: bucket }).promise()
.catch(err => {
process.stdout.write(`Error creating bucket: ${err}\n`);
throw err;

@@ -23,16 +23,18 @@ describe('List parts', () => {
beforeEach(() => {
bucketUtil = new BucketUtility('default', sigCfg);
s3 = bucketUtil.s3;
- return s3.createBucketPromise({ Bucket: bucket })
- .then(() => s3.createMultipartUploadPromise({
- Bucket: bucket, Key: key }))
+ return s3.createBucket({ Bucket: bucket }).promise()
+ .then(() => s3.createMultipartUpload({
+ Bucket: bucket, Key: key }).promise())
.then(res => {
uploadId = res.UploadId;
- return s3.uploadPartPromise({ Bucket: bucket, Key: key,
- PartNumber: 1, UploadId: uploadId, Body: bodyFirstPart });
- }).then(() => s3.uploadPartPromise({ Bucket: bucket, Key: key,
- PartNumber: 2, UploadId: uploadId, Body: bodySecondPart })
- ).then(res => {
+ return s3.uploadPart({ Bucket: bucket, Key: key,
+ PartNumber: 1, UploadId: uploadId, Body: bodyFirstPart,
+ }).promise();
+ }).then(() => s3.uploadPart({
+ Bucket: bucket, Key: key,
+ PartNumber: 2, UploadId: uploadId, Body: bodySecondPart,
+ }).promise()).then(res => {
secondEtag = res.ETag;
return secondEtag;
})
@@ -44,9 +46,9 @@ describe('List parts', () => {

afterEach(() => {
process.stdout.write('Emptying bucket');
- return s3.abortMultipartUploadPromise({
+ return s3.abortMultipartUpload({
Bucket: bucket, Key: key, UploadId: uploadId,
- })
+ }).promise()
.then(() => bucketUtil.empty(bucket))
.then(() => {
process.stdout.write('Deleting bucket');
@@ -80,13 +82,13 @@ describe('List parts', () => {
/* eslint-disable no-param-reassign */
function createPart(sigCfg, bucketUtil, s3, key) {
let uploadId;
- return s3.createBucketPromise({ Bucket: bucket })
- .then(() => s3.createMultipartUploadPromise({
- Bucket: bucket, Key: key }))
+ return s3.createBucket({ Bucket: bucket }).promise()
+ .then(() => s3.createMultipartUpload({
+ Bucket: bucket, Key: key }).promise())
.then(res => {
uploadId = res.UploadId;
- return s3.uploadPartPromise({ Bucket: bucket, Key: key,
- PartNumber: 1, UploadId: uploadId, Body: bodyFirstPart });
+ return s3.uploadPart({ Bucket: bucket, Key: key,
+ PartNumber: 1, UploadId: uploadId, Body: bodyFirstPart }).promise();
})
.then(() => Promise.resolve(uploadId));
}
@@ -94,9 +96,9 @@ function createPart(sigCfg, bucketUtil, s3, key) {
function deletePart(s3, bucketUtil, key, uploadId) {
process.stdout.write('Emptying bucket');

- return s3.abortMultipartUploadPromise({
+ return s3.abortMultipartUpload({
Bucket: bucket, Key: key, UploadId: uploadId,
- })
+ }).promise()
.then(() => bucketUtil.empty(bucket))
.then(() => {
process.stdout.write('Deleting bucket');

@@ -85,34 +85,34 @@ describe('aws-node-sdk test suite of listMultipartUploads', () =>
bucketUtil = new BucketUtility('default', sigCfg);
s3 = bucketUtil.s3;

- return s3.createBucketPromise({ Bucket: bucket })
+ return s3.createBucket({ Bucket: bucket }).promise()
.then(() => bucketUtil.getOwner())
.then(res => {
// The owner of the bucket will also be the MPU upload owner.
data.displayName = res.DisplayName;
data.userId = res.ID;
})
- .then(() => s3.createMultipartUploadPromise({
+ .then(() => s3.createMultipartUpload({
Bucket: bucket,
Key: objectKey,
- }))
+ }).promise())
.then(res => {
data.uploadId = res.UploadId;
});
});

afterEach(() =>
- s3.abortMultipartUploadPromise({
+ s3.abortMultipartUpload({
Bucket: bucket,
Key: objectKey,
UploadId: data.uploadId,
- })
+ }).promise()
.then(() => bucketUtil.empty(bucket))
.then(() => bucketUtil.deleteOne(bucket))
);

it('should list ongoing multipart uploads', () =>
- s3.listMultipartUploadsPromise({ Bucket: bucket })
+ s3.listMultipartUploads({ Bucket: bucket }).promise()
.then(res => checkValues(res, data))
);

@@ -121,22 +121,22 @@ describe('aws-node-sdk test suite of listMultipartUploads', () =>
data.delimiter = 'test-delimiter';
data.maxUploads = 1;

- return s3.listMultipartUploadsPromise({
+ return s3.listMultipartUploads({
Bucket: bucket,
Prefix: 'to',
Delimiter: 'test-delimiter',
MaxUploads: 1,
- })
+ }).promise()
.then(res => checkValues(res, data));
});

it('should list 0 multipart uploads when MaxUploads is 0', () => {
data.maxUploads = 0;

- return s3.listMultipartUploadsPromise({
+ return s3.listMultipartUploads({
Bucket: bucket,
MaxUploads: 0,
- })
+ }).promise()
.then(res => checkValues(res, data));
});
})

@@ -52,7 +52,7 @@ describe('Multi-Object Delete Success', function success() {
signatureVersion: 'v4',
});
s3 = bucketUtil.s3;
- return s3.createBucketPromise({ Bucket: bucketName })
+ return s3.createBucket({ Bucket: bucketName }).promise()
.catch(err => {
process.stdout.write(`Error creating bucket: ${err}\n`);
throw err;
@@ -67,11 +67,11 @@ describe('Multi-Object Delete Success', function success() {
const putPromises = objects.map(key => {
const mustComplete = Math.max(0, queued.length - parallel + 1);
const result = Promise.some(queued, mustComplete).then(() =>
- s3.putObjectPromise({
+ s3.putObject({
Bucket: bucketName,
Key: key,
Body: 'somebody',
- })
+ }).promise()
);
queued.push(result);
return result;
@@ -83,17 +83,17 @@ describe('Multi-Object Delete Success', function success() {
});
});

- afterEach(() => s3.deleteBucketPromise({ Bucket: bucketName }));
+ afterEach(() => s3.deleteBucket({ Bucket: bucketName }).promise());

it('should batch delete 1000 objects', () => {
const objects = createObjectsList(1000);
- return s3.deleteObjectsPromise({
+ return s3.deleteObjects({
Bucket: bucketName,
Delete: {
Objects: objects,
Quiet: false,
},
- }).then(res => {
+ }).promise().then(res => {
assert.strictEqual(res.Deleted.length, 1000);
// order of returned objects not sorted
assert.deepStrictEqual(sortList(res.Deleted), sortList(objects));
@@ -105,13 +105,13 @@ describe('Multi-Object Delete Success', function success() {

it('should batch delete 1000 objects quietly', () => {
const objects = createObjectsList(1000);
- return s3.deleteObjectsPromise({
+ return s3.deleteObjects({
Bucket: bucketName,
Delete: {
Objects: objects,
Quiet: true,
},
- }).then(res => {
+ }).promise().then(res => {
assert.strictEqual(res.Deleted.length, 0);
assert.strictEqual(res.Errors.length, 0);
}).catch(err => {
@@ -128,24 +128,24 @@ describe('Multi-Object Delete Error Responses', () => {
beforeEach(() => {
bucketUtil = new BucketUtility('default', sigCfg);
s3 = bucketUtil.s3;
- return s3.createBucketPromise({ Bucket: bucketName })
+ return s3.createBucket({ Bucket: bucketName }).promise()
.catch(err => {
process.stdout.write(`Error creating bucket: ${err}\n`);
throw err;
});
});

- afterEach(() => s3.deleteBucketPromise({ Bucket: bucketName }));
+ afterEach(() => s3.deleteBucket({ Bucket: bucketName }).promise());

it('should return error if request deletion of more than 1000 objects',
() => {
const objects = createObjectsList(1001);
- return s3.deleteObjectsPromise({
+ return s3.deleteObjects({
Bucket: bucketName,
Delete: {
Objects: objects,
},
- }).catch(err => {
+ }).promise().catch(err => {
checkError(err, 'MalformedXML');
});
});
@@ -153,12 +153,12 @@ describe('Multi-Object Delete Error Responses', () => {
it('should return error if request deletion of 0 objects',
() => {
const objects = createObjectsList(0);
- return s3.deleteObjectsPromise({
+ return s3.deleteObjects({
Bucket: bucketName,
Delete: {
Objects: objects,
},
- }).catch(err => {
+ }).promise().catch(err => {
checkError(err, 'MalformedXML');
});
});
@@ -166,12 +166,12 @@ describe('Multi-Object Delete Error Responses', () => {
it('should return no error if try to delete non-existent objects',
() => {
const objects = createObjectsList(1000);
- return s3.deleteObjectsPromise({
+ return s3.deleteObjects({
Bucket: bucketName,
Delete: {
Objects: objects,
},
- }).then(res => {
+ }).promise().then(res => {
assert.strictEqual(res.Deleted.length, 1000);
assert.strictEqual(res.Errors.length, 0);
}).catch(err => {
@@ -181,12 +181,12 @@ describe('Multi-Object Delete Error Responses', () => {

it('should return error if no such bucket', () => {
const objects = createObjectsList(1);
- return s3.deleteObjectsPromise({
+ return s3.deleteObjects({
Bucket: 'nosuchbucket2323292093',
Delete: {
Objects: objects,
},
- }).catch(err => {
+ }).promise().catch(err => {
checkError(err, 'NoSuchBucket');
});
});
@@ -204,18 +204,18 @@ describe('Multi-Object Delete Access', function access() {
signatureVersion: 'v4',
});
s3 = bucketUtil.s3;
- return s3.createBucketPromise({ Bucket: bucketName })
+ return s3.createBucket({ Bucket: bucketName }).promise()
.catch(err => {
process.stdout.write(`Error creating bucket: ${err}\n`);
throw err;
})
.then(() => {
for (let i = 1; i < 501; i++) {
- createObjects.push(s3.putObjectPromise({
+ createObjects.push(s3.putObject({
Bucket: bucketName,
Key: `${key}${i}`,
Body: 'somebody',
- }));
+ }).promise());
}
return Promise.all(createObjects)
.catch(err => {
@@ -225,7 +225,7 @@ describe('Multi-Object Delete Access', function access() {
});
});

- after(() => s3.deleteBucketPromise({ Bucket: bucketName }));
+ after(() => s3.deleteBucket({ Bucket: bucketName }).promise());

it('should return access denied error for each object where no acl ' +
'permission', () => {
@@ -236,13 +236,13 @@ describe('Multi-Object Delete Access', function access() {
item.Code = 'AccessDenied';
item.Message = 'Access Denied';
});
- return otherAccountS3.deleteObjectsPromise({
+ return otherAccountS3.deleteObjects({
Bucket: bucketName,
Delete: {
Objects: objects,
Quiet: false,
},
- }).then(res => {
+ }).promise().then(res => {
assert.strictEqual(res.Deleted.length, 0);
assert.deepStrictEqual(sortList(res.Errors), sortList(errorList));
assert.strictEqual(res.Errors.length, 500);
@@ -254,13 +254,13 @@ describe('Multi-Object Delete Access', function access() {

it('should batch delete objects where requester has permission', () => {
const objects = createObjectsList(500);
- return s3.deleteObjectsPromise({
+ return s3.deleteObjects({
Bucket: bucketName,
Delete: {
Objects: objects,
Quiet: false,
},
- }).then(res => {
+ }).promise().then(res => {
assert.strictEqual(res.Deleted.length, 500);
assert.strictEqual(res.Errors.length, 0);
}).catch(err => {
@@ -268,3 +268,4 @@ describe('Multi-Object Delete Access', function access() {
});
});
});
+

@ -99,7 +99,7 @@ describe('Object Copy', () => {
|
|||
});
|
||||
});
|
||||
|
||||
beforeEach(() => s3.putObjectPromise({
|
||||
beforeEach(() => s3.putObject({
|
||||
Bucket: sourceBucketName,
|
||||
Key: sourceObjName,
|
||||
Body: content,
|
||||
|
@ -109,13 +109,13 @@ describe('Object Copy', () => {
|
|||
ContentEncoding: originalContentEncoding,
|
||||
Expires: originalExpires,
|
||||
Tagging: originalTagging,
|
||||
}).then(res => {
|
||||
}).promise().then(res => {
|
||||
etag = res.ETag;
|
||||
etagTrim = etag.substring(1, etag.length - 1);
|
||||
return s3.headObjectPromise({
|
||||
return s3.headObject({
|
||||
Bucket: sourceBucketName,
|
||||
Key: sourceObjName,
|
||||
});
|
||||
}).promise();
|
||||
}).then(res => {
|
||||
lastModified = res.LastModified;
|
||||
}));
|
||||
|
@ -1201,3 +1201,4 @@ describe('Object Copy', () => {
|
|||
});
|
||||
});
|
||||
});
|
||||
|
||||
|
|
|
@ -60,15 +60,15 @@ describe('HEAD object, conditions', () => {
|
|||
}, fields), cb);
|
||||
}
|
||||
|
||||
beforeEach(() => s3.putObjectPromise({
|
||||
beforeEach(() => s3.putObject({
|
||||
Bucket: bucketName,
|
||||
Key: objectName,
|
||||
Body: 'I am the best content ever',
|
||||
}).then(res => {
|
||||
}).promise().then(res => {
|
||||
etag = res.ETag;
|
||||
etagTrim = etag.substring(1, etag.length - 1);
|
||||
return s3.headObjectPromise(
|
||||
{ Bucket: bucketName, Key: objectName });
|
||||
return s3.headObject(
|
||||
{ Bucket: bucketName, Key: objectName }).promise();
|
||||
}).then(res => {
|
||||
lastModified = res.LastModified;
|
||||
}));
|
||||
|
|
|
@ -40,7 +40,7 @@ describe('HEAD object, compatibility headers [Cache-Control, ' +
|
|||
ContentEncoding: contentEncoding,
|
||||
Expires: expires,
|
||||
};
|
||||
return s3.putObjectPromise(params);
|
||||
return s3.putObject(params).promise();
|
||||
})
|
||||
.catch(err => {
|
||||
process.stdout.write(`Error with putObject: ${err}\n`);
|
||||
|
|
|
@ -32,13 +32,13 @@ describe('Put object with same key as prior object', () => {
|
|||
.catch(done);
|
||||
});
|
||||
|
||||
beforeEach(() => s3.putObjectPromise({
|
||||
beforeEach(() => s3.putObject({
|
||||
Bucket: bucketName,
|
||||
Key: objectName,
|
||||
Body: 'I am the best content ever',
|
||||
Metadata: firstPutMetadata,
|
||||
}).then(() =>
|
||||
s3.headObjectPromise({ Bucket: bucketName, Key: objectName })
|
||||
}).promise().then(() =>
|
||||
s3.headObject({ Bucket: bucketName, Key: objectName }).promise()
|
||||
).then(res => {
|
||||
assert.deepStrictEqual(res.Metadata, firstPutMetadata);
|
||||
}));
|
||||
|
@ -48,13 +48,13 @@ describe('Put object with same key as prior object', () => {
|
|||
after(() => bucketUtil.deleteOne(bucketName));
|
||||
|
||||
it('should overwrite all user metadata and data on overwrite put',
|
||||
() => s3.putObjectPromise({
|
||||
() => s3.putObject({
|
||||
Bucket: bucketName,
|
||||
Key: objectName,
|
||||
Body: 'Much different',
|
||||
Metadata: secondPutMetadata,
|
||||
}).then(() =>
|
||||
s3.getObjectPromise({ Bucket: bucketName, Key: objectName })
|
||||
}).promise().then(() =>
|
||||
s3.getObject({ Bucket: bucketName, Key: objectName }).promise()
|
||||
).then(res => {
|
||||
assert.deepStrictEqual(res.Metadata, secondPutMetadata);
|
||||
assert.deepStrictEqual(res.Body.toString(),
|
||||
|
|
|
@ -25,7 +25,7 @@ describe('PUT object', () => {
|
|||
beforeEach(() => {
|
||||
bucketUtil = new BucketUtility('default', sigCfg);
|
||||
s3 = bucketUtil.s3;
|
||||
return s3.createBucketPromise({ Bucket: bucket })
|
||||
return s3.createBucket({ Bucket: bucket }).promise()
|
||||
.catch(err => {
|
||||
process.stdout.write(`Error creating bucket: ${err}\n`);
|
||||
throw err;
|
||||
|
|
|
@ -73,9 +73,9 @@ describe('PUT Object ACL', () => {
|
|||
];
|
||||
|
||||
Promise
|
||||
.mapSeries(objects, param => s3.putObjectPromise(param))
|
||||
.then(() => s3.putObjectAclPromise({ Bucket, Key,
|
||||
ACL: 'public-read' }))
|
||||
.mapSeries(objects, param => s3.putObject(param).promise())
|
||||
.then(() => s3.putObjectAcl({ Bucket, Key,
|
||||
ACL: 'public-read' }).promise())
|
||||
.then(data => {
|
||||
assert(data);
|
||||
done();
|
||||
|
|
|
@ -15,9 +15,9 @@ describe('PUT object', () => {
|
|||
beforeEach(() => {
|
||||
bucketUtil = new BucketUtility('default', sigCfg);
|
||||
s3 = bucketUtil.s3;
|
||||
return s3.createBucketPromise({ Bucket: bucket })
|
||||
.then(() => s3.createMultipartUploadPromise({
|
||||
Bucket: bucket, Key: key }))
|
||||
return s3.createBucket({ Bucket: bucket }).promise()
|
||||
.then(() => s3.createMultipartUpload({
|
||||
Bucket: bucket, Key: key }).promise())
|
||||
.then(res => {
|
||||
uploadId = res.UploadId;
|
||||
return uploadId;
|
||||
|
@ -30,9 +30,9 @@ describe('PUT object', () => {
|
|||
|
||||
afterEach(() => {
|
||||
process.stdout.write('Emptying bucket');
|
||||
return s3.abortMultipartUploadPromise({
|
||||
return s3.abortMultipartUpload({
|
||||
Bucket: bucket, Key: key, UploadId: uploadId,
|
||||
})
|
||||
}).promise()
|
||||
.then(() => bucketUtil.empty(bucket))
|
||||
.then(() => {
|
||||
process.stdout.write('Deleting bucket');
|
||||
|
|
|
@ -35,11 +35,11 @@ function getOuterRange(range, bytes) {
|
|||
// Get the ranged object from a bucket. Write the response body to a file, then
|
||||
// use getRangeExec to check that all the bytes are in the correct location.
|
||||
function checkRanges(range, bytes) {
|
||||
return s3.getObjectPromise({
|
||||
return s3.getObject({
|
||||
Bucket: bucket,
|
||||
Key: key,
|
||||
Range: `bytes=${range}`,
|
||||
})
|
||||
}).promise()
|
||||
.then(res => {
|
||||
const { begin, end } = getOuterRange(range, bytes);
|
||||
const total = (end - begin) + 1;
|
||||
|
@ -68,13 +68,13 @@ function uploadParts(bytes, uploadId) {
|
|||
return Promise.map([1, 2], part =>
|
||||
execFileAsync('dd', [`if=${name}`, `of=${name}.mpuPart${part}`,
|
||||
'bs=5242880', `skip=${part - 1}`, 'count=1'])
|
||||
.then(() => s3.uploadPartPromise({
|
||||
.then(() => s3.uploadPart({
|
||||
Bucket: bucket,
|
||||
Key: key,
|
||||
PartNumber: part,
|
||||
UploadId: uploadId,
|
||||
Body: createReadStream(`${name}.mpuPart${part}`),
|
||||
}))
|
||||
}).promise())
|
||||
);
|
||||
}
|
||||
|
||||
|
@ -97,17 +97,17 @@ describe('aws-node-sdk range tests', () => {
|
|||
let uploadId;
|
||||
|
||||
beforeEach(() =>
|
||||
s3.createBucketPromise({ Bucket: bucket })
|
||||
.then(() => s3.createMultipartUploadPromise({
|
||||
s3.createBucket({ Bucket: bucket }).promise()
|
||||
.then(() => s3.createMultipartUpload({
|
||||
Bucket: bucket,
|
||||
Key: key,
|
||||
}))
|
||||
}).promise())
|
||||
.then(res => {
|
||||
uploadId = res.UploadId;
|
||||
})
|
||||
.then(() => createHashedFile(fileSize))
|
||||
.then(() => uploadParts(fileSize, uploadId))
|
||||
.then(res => s3.completeMultipartUploadPromise({
|
||||
.then(res => s3.completeMultipartUpload({
|
||||
Bucket: bucket,
|
||||
Key: key,
|
||||
UploadId: uploadId,
|
||||
|
@ -123,15 +123,15 @@ describe('aws-node-sdk range tests', () => {
|
|||
},
|
||||
],
|
||||
},
|
||||
}))
|
||||
}).promise())
|
||||
);
|
||||
|
||||
afterEach(() => bucketUtil.empty(bucket)
|
||||
.then(() => s3.abortMultipartUploadPromise({
|
||||
.then(() => s3.abortMultipartUpload({
|
||||
Bucket: bucket,
|
||||
Key: key,
|
||||
UploadId: uploadId,
|
||||
}))
|
||||
}).promise())
|
||||
.catch(err => new Promise((resolve, reject) => {
|
||||
if (err.code !== 'NoSuchUpload') {
|
||||
reject(err);
|
||||
|
@ -164,13 +164,13 @@ describe('aws-node-sdk range tests', () => {
|
|||
const fileSize = 2000;
|
||||
|
||||
beforeEach(() =>
|
||||
s3.createBucketPromise({ Bucket: bucket })
|
||||
s3.createBucket({ Bucket: bucket }).promise()
|
||||
.then(() => createHashedFile(fileSize))
|
||||
.then(() => s3.putObjectPromise({
|
||||
.then(() => s3.putObject({
|
||||
Bucket: bucket,
|
||||
Key: key,
|
||||
Body: createReadStream(`hashedFile.${fileSize}`),
|
||||
})));
|
||||
}).promise()));
|
||||
|
||||
afterEach(() =>
|
||||
bucketUtil.empty(bucket)
|
||||
|
@ -221,13 +221,13 @@ describe('aws-node-sdk range tests', () => {
|
|||
const fileSize = 2900;
|
||||
|
||||
beforeEach(() =>
|
||||
s3.createBucketPromise({ Bucket: bucket })
|
||||
s3.createBucket({ Bucket: bucket }).promise()
|
||||
.then(() => createHashedFile(fileSize))
|
||||
.then(() => s3.putObjectPromise({
|
||||
.then(() => s3.putObject({
|
||||
Bucket: bucket,
|
||||
Key: key,
|
||||
Body: createReadStream(`hashedFile.${fileSize}`),
|
||||
})));
|
||||
}).promise()));
|
||||
|
||||
afterEach(() =>
|
||||
bucketUtil.empty(bucket)
|
||||
|
|
|
@ -34,21 +34,21 @@ describe('User visits bucket website endpoint and requests resource ' +
|
|||
describe('when x-amz-website-redirect-location: /redirect.html', () => {
|
||||
beforeEach(() => {
|
||||
const webConfig = new WebsiteConfigTester('index.html');
|
||||
return s3.putBucketWebsitePromise({ Bucket: bucket,
|
||||
WebsiteConfiguration: webConfig })
|
||||
.then(() => s3.putObjectPromise({ Bucket: bucket,
|
||||
return s3.putBucketWebsite({ Bucket: bucket,
|
||||
WebsiteConfiguration: webConfig }).promise()
|
||||
.then(() => s3.putObject({ Bucket: bucket,
|
||||
Key: 'index.html',
|
||||
ACL: 'public-read',
|
||||
Body: fs.readFileSync(path.join(__dirname,
|
||||
'/websiteFiles/index.html')),
|
||||
ContentType: 'text/html',
|
||||
WebsiteRedirectLocation: '/redirect.html' }))
|
||||
.then(() => s3.putObjectPromise({ Bucket: bucket,
|
||||
WebsiteRedirectLocation: '/redirect.html' }).promise())
|
||||
.then(() => s3.putObject({ Bucket: bucket,
|
||||
Key: 'redirect.html',
|
||||
ACL: 'public-read',
|
||||
Body: fs.readFileSync(path.join(__dirname,
|
||||
'/websiteFiles/redirect.html')),
|
||||
ContentType: 'text/html' }));
|
||||
ContentType: 'text/html' }).promise());
|
||||
});
|
||||
|
||||
afterEach(() => bucketUtil.empty(bucket));
|
||||
|
@ -76,15 +76,15 @@ describe('User visits bucket website endpoint and requests resource ' +
|
|||
() => {
|
||||
beforeEach(() => {
|
||||
const webConfig = new WebsiteConfigTester('index.html');
|
||||
return s3.putBucketWebsitePromise({ Bucket: bucket,
|
||||
WebsiteConfiguration: webConfig })
|
||||
.then(() => s3.putObjectPromise({ Bucket: bucket,
|
||||
return s3.putBucketWebsite({ Bucket: bucket,
|
||||
WebsiteConfiguration: webConfig }).promise()
|
||||
.then(() => s3.putObject({ Bucket: bucket,
|
||||
Key: 'index.html',
|
||||
ACL: 'public-read',
|
||||
Body: fs.readFileSync(path.join(__dirname,
|
||||
'/websiteFiles/index.html')),
|
||||
ContentType: 'text/html',
|
||||
WebsiteRedirectLocation: 'https://www.google.com' }));
|
||||
WebsiteRedirectLocation: 'https://www.google.com' }).promise());
|
||||
});
|
||||
|
||||
afterEach(() => bucketUtil.empty(bucket));
|
||||
|
@ -112,14 +112,14 @@ describe('User visits bucket website endpoint and requests resource ' +
|
|||
describe('when key with header is private', () => {
|
||||
beforeEach(() => {
|
||||
const webConfig = new WebsiteConfigTester('index.html');
|
||||
return s3.putBucketWebsitePromise({ Bucket: bucket,
|
||||
WebsiteConfiguration: webConfig })
|
||||
.then(() => s3.putObjectPromise({ Bucket: bucket,
|
||||
return s3.putBucketWebsite({ Bucket: bucket,
|
||||
WebsiteConfiguration: webConfig }).promise()
|
||||
.then(() => s3.putObject({ Bucket: bucket,
|
||||
Key: 'index.html',
|
||||
Body: fs.readFileSync(path.join(__dirname,
|
||||
'/websiteFiles/index.html')),
|
||||
ContentType: 'text/html',
|
||||
WebsiteRedirectLocation: 'https://www.google.com' }));
|
||||
WebsiteRedirectLocation: 'https://www.google.com' }).promise());
|
||||
});
|
||||
|
||||
afterEach(() => bucketUtil.empty(bucket));
|
||||
|
@ -154,20 +154,20 @@ describe('User visits bucket website endpoint and requests resource ' +
|
|||
HostName: 'www.google.com',
|
||||
};
|
||||
webConfig.addRoutingRule(redirect, condition);
|
||||
return s3.putBucketWebsitePromise({ Bucket: bucket,
|
||||
WebsiteConfiguration: webConfig })
|
||||
.then(() => s3.putObjectPromise({ Bucket: bucket,
|
||||
return s3.putBucketWebsite({ Bucket: bucket,
|
||||
WebsiteConfiguration: webConfig }).promise()
|
||||
.then(() => s3.putObject({ Bucket: bucket,
|
||||
Key: 'index.html',
|
||||
Body: fs.readFileSync(path.join(__dirname,
|
||||
'/websiteFiles/index.html')),
|
||||
ContentType: 'text/html',
|
||||
WebsiteRedirectLocation: '/redirect.html' }))
|
||||
.then(() => s3.putObjectPromise({ Bucket: bucket,
|
||||
WebsiteRedirectLocation: '/redirect.html' }).promise())
|
||||
.then(() => s3.putObject({ Bucket: bucket,
|
||||
Key: 'redirect.html',
|
||||
ACL: 'public-read',
|
||||
Body: fs.readFileSync(path.join(__dirname,
|
||||
'/websiteFiles/redirect.html')),
|
||||
ContentType: 'text/html' }));
|
||||
ContentType: 'text/html' }).promise());
|
||||
});
|
||||
|
||||
afterEach(() => bucketUtil.empty(bucket));
|
||||
|
@ -203,15 +203,15 @@ describe('User visits bucket website endpoint and requests resource ' +
|
|||
};
|
||||
const webConfig = new WebsiteConfigTester(null, null,
|
||||
redirectAllTo);
|
||||
return s3.putBucketWebsitePromise({ Bucket: bucket,
|
||||
WebsiteConfiguration: webConfig })
|
||||
.then(() => s3.putObjectPromise({ Bucket: bucket,
|
||||
return s3.putBucketWebsite({ Bucket: bucket,
|
||||
WebsiteConfiguration: webConfig }).promise()
|
||||
.then(() => s3.putObject({ Bucket: bucket,
|
||||
Key: 'index.html',
|
||||
ACL: 'public-read',
|
||||
Body: fs.readFileSync(path.join(__dirname,
|
||||
'/websiteFiles/index.html')),
|
||||
ContentType: 'text/html',
|
||||
WebsiteRedirectLocation: '/redirect.html' }));
|
||||
WebsiteRedirectLocation: '/redirect.html' }).promise());
|
||||
});
|
||||
|
||||
afterEach(() => bucketUtil.empty(bucket));
|
||||
|
@ -250,15 +250,15 @@ describe('User visits bucket website endpoint and requests resource ' +
|
|||
HostName: 'www.google.com',
|
||||
};
|
||||
webConfig.addRoutingRule(redirect, condition);
|
||||
return s3.putBucketWebsitePromise({ Bucket: bucket,
|
||||
WebsiteConfiguration: webConfig })
|
||||
.then(() => s3.putObjectPromise({ Bucket: bucket,
|
||||
return s3.putBucketWebsite({ Bucket: bucket,
|
||||
WebsiteConfiguration: webConfig }).promise()
|
||||
.then(() => s3.putObject({ Bucket: bucket,
|
||||
Key: 'about/index.html',
|
||||
ACL: 'public-read',
|
||||
Body: fs.readFileSync(path.join(__dirname,
|
||||
'/websiteFiles/index.html')),
|
||||
ContentType: 'text/html',
|
||||
WebsiteRedirectLocation: '/redirect.html' }));
|
||||
WebsiteRedirectLocation: '/redirect.html' }).promise());
|
||||
});
|
||||
|
||||
afterEach(() => bucketUtil.empty(bucket));
|
||||
|
@ -296,21 +296,21 @@ describe('User visits bucket website endpoint and requests resource ' +
|
|||
ReplaceKeyWith: 'redirect.html',
|
||||
};
|
||||
webConfig.addRoutingRule(redirect, condition);
|
||||
return s3.putBucketWebsitePromise({ Bucket: bucket,
|
||||
WebsiteConfiguration: webConfig })
|
||||
.then(() => s3.putObjectPromise({ Bucket: bucket,
|
||||
return s3.putBucketWebsite({ Bucket: bucket,
|
||||
WebsiteConfiguration: webConfig }).promise()
|
||||
.then(() => s3.putObject({ Bucket: bucket,
|
||||
Key: 'index.html',
|
||||
ACL: 'public-read',
|
||||
Body: fs.readFileSync(path.join(__dirname,
|
||||
'/websiteFiles/index.html')),
|
||||
ContentType: 'text/html',
|
||||
WebsiteRedirectLocation: 'https://www.google.com' }))
|
||||
.then(() => s3.putObjectPromise({ Bucket: bucket,
|
||||
WebsiteRedirectLocation: 'https://www.google.com' }).promise())
|
||||
.then(() => s3.putObject({ Bucket: bucket,
|
||||
Key: 'redirect.html',
|
||||
ACL: 'public-read',
|
||||
Body: fs.readFileSync(path.join(__dirname,
|
||||
'/websiteFiles/redirect.html')),
|
||||
ContentType: 'text/html' }));
|
||||
ContentType: 'text/html' }).promise());
|
||||
});
|
||||
|
||||
afterEach(() => bucketUtil.empty(bucket));
|
||||
|
|
|
@ -1,6 +1,6 @@
|
|||
const assert = require('assert');
|
||||
const tv4 = require('tv4');
|
||||
const Promise = require('bluebird');
|
||||
const bluebird = require('bluebird');
|
||||
const async = require('async');
|
||||
const { S3 } = require('aws-sdk');
|
||||
|
||||
|
@ -153,7 +153,7 @@ describeFn('GET Service - AWS.S3.listBuckets', function getService() {
|
|||
|
||||
it('should list buckets', done => {
|
||||
s3
|
||||
.listBucketsPromise()
|
||||
.listBuckets().promise()
|
||||
.then(data => {
|
||||
const isValidResponse = tv4.validate(data, svcSchema);
|
||||
if (!isValidResponse) {
|
||||
|
@ -196,13 +196,13 @@ describeFn('GET Service - AWS.S3.listBuckets', function getService() {
|
|||
let anotherS3;
|
||||
|
||||
before(() => {
|
||||
anotherS3 = Promise.promisifyAll(new S3(getConfig('lisa')),
|
||||
{ suffix: 'Promise' });
|
||||
anotherS3 = new S3(getConfig('lisa'));
|
||||
anotherS3.config.setPromisesDependency(bluebird);
|
||||
});
|
||||
|
||||
it('should not return other accounts bucket list', done => {
|
||||
anotherS3
|
||||
.listBucketsPromise()
|
||||
.listBuckets().promise()
|
||||
.then(data => {
|
||||
const hasSameBuckets = data.Buckets
|
||||
.filter(filterFn)
|
||||
|
|
|
@ -29,7 +29,7 @@ describe('aws-node-sdk test delete bucket', () => {
|
|||
async.waterfall([
|
||||
next => s3.createBucket({ Bucket: bucketName },
|
||||
err => next(err)),
|
||||
next => s3.putBucketVersioningPromise({
|
||||
next => s3.putBucketVersioning({
|
||||
Bucket: bucketName,
|
||||
VersioningConfiguration: {
|
||||
Status: 'Enabled',
|
||||
|
|
|
@ -94,13 +94,13 @@ describe('Multi-Object Versioning Delete Success', function success() {
|
|||
it('should batch delete 1000 objects quietly', () => {
|
||||
const objects = objectsRes.slice(0, 1000).map(obj =>
|
||||
({ Key: obj.Key, VersionId: obj.VersionId }));
|
||||
return s3.deleteObjectsPromise({
|
||||
return s3.deleteObjects({
|
||||
Bucket: bucketName,
|
||||
Delete: {
|
||||
Objects: objects,
|
||||
Quiet: true,
|
||||
},
|
||||
}).then(res => {
|
||||
}).promise().then(res => {
|
||||
assert.strictEqual(res.Deleted.length, 0);
|
||||
assert.strictEqual(res.Errors.length, 0);
|
||||
}).catch(err => {
|
||||
|
@ -111,13 +111,13 @@ describe('Multi-Object Versioning Delete Success', function success() {
|
|||
it('should batch delete 1000 objects', () => {
|
||||
const objects = objectsRes.slice(0, 1000).map(obj =>
|
||||
({ Key: obj.Key, VersionId: obj.VersionId }));
|
||||
return s3.deleteObjectsPromise({
|
||||
return s3.deleteObjects({
|
||||
Bucket: bucketName,
|
||||
Delete: {
|
||||
Objects: objects,
|
||||
Quiet: false,
|
||||
},
|
||||
}).then(res => {
|
||||
}).promise().then(res => {
|
||||
assert.strictEqual(res.Deleted.length, 1000);
|
||||
// order of returned objects not sorted
|
||||
assert.deepStrictEqual(sortList(res.Deleted),
|
||||
|
@ -133,12 +133,12 @@ describe('Multi-Object Versioning Delete Success', function success() {
|
|||
const objects = objectsRes.slice(0, 1000).map(obj =>
|
||||
({ Key: obj.Key, VersionId: obj.VersionId }));
|
||||
objects[0].VersionId = 'invalid-version-id';
|
||||
return s3.deleteObjectsPromise({
|
||||
return s3.deleteObjects({
|
||||
Bucket: bucketName,
|
||||
Delete: {
|
||||
Objects: objects,
|
||||
},
|
||||
}).then(res => {
|
||||
}).promise().then(res => {
|
||||
assert.strictEqual(res.Deleted.length, 999);
|
||||
assert.strictEqual(res.Errors.length, 1);
|
||||
assert.strictEqual(res.Errors[0].Code, 'NoSuchVersion');
|
||||
|
@ -153,12 +153,12 @@ describe('Multi-Object Versioning Delete Success', function success() {
|
|||
const objects = objectsRes.slice(0, 1000).map(obj =>
|
||||
({ Key: obj.Key, VersionId: obj.VersionId }));
|
||||
objects[0].VersionId = nonExistingId;
|
||||
return s3.deleteObjectsPromise({
|
||||
return s3.deleteObjects({
|
||||
Bucket: bucketName,
|
||||
Delete: {
|
||||
Objects: objects,
|
||||
},
|
||||
}).then(res => {
|
||||
}).promise().then(res => {
|
||||
assert.strictEqual(res.Deleted.length, 1000);
|
||||
assert.strictEqual(res.Errors.length, 0);
|
||||
const foundVersionId = res.Deleted.find(entry =>
|
||||
|
@ -183,7 +183,7 @@ describe('Multi-Object Versioning Delete - deleting delete marker',
|
|||
async.waterfall([
|
||||
next => s3.createBucket({ Bucket: bucketName },
|
||||
err => next(err)),
|
||||
next => s3.putBucketVersioningPromise({
|
||||
next => s3.putBucketVersioning({
|
||||
Bucket: bucketName,
|
||||
VersioningConfiguration: {
|
||||
Status: 'Enabled',
|
||||
|
|
|
@ -88,11 +88,11 @@ describe('Object Version Copy', () => {
|
|||
|
||||
beforeEach(() => bucketUtil.createOne(sourceBucketName)
|
||||
.then(() => bucketUtil.createOne(destBucketName))
|
||||
.then(() => s3.putBucketVersioningPromise({
|
||||
.then(() => s3.putBucketVersioning({
|
||||
Bucket: sourceBucketName,
|
||||
VersioningConfiguration: { Status: 'Enabled' },
|
||||
}))
|
||||
.then(() => s3.putObjectPromise({
|
||||
}).promise())
|
||||
.then(() => s3.putObject({
|
||||
Bucket: sourceBucketName,
|
||||
Key: sourceObjName,
|
||||
Body: content,
|
||||
|
@ -102,22 +102,22 @@ describe('Object Version Copy', () => {
|
|||
ContentEncoding: originalContentEncoding,
|
||||
Expires: originalExpires,
|
||||
Tagging: originalTagging,
|
||||
})).then(res => {
|
||||
}).promise()).then(res => {
|
||||
etag = res.ETag;
|
||||
versionId = res.VersionId;
|
||||
copySource = `${sourceBucketName}/${sourceObjName}` +
|
||||
`?versionId=${versionId}`;
|
||||
etagTrim = etag.substring(1, etag.length - 1);
|
||||
copySourceVersionId = res.VersionId;
|
||||
return s3.headObjectPromise({
|
||||
return s3.headObject({
|
||||
Bucket: sourceBucketName,
|
||||
Key: sourceObjName,
|
||||
});
|
||||
}).promise();
|
||||
}).then(res => {
|
||||
lastModified = res.LastModified;
|
||||
}).then(() => s3.putObjectPromise({ Bucket: sourceBucketName,
|
||||
}).then(() => s3.putObject({ Bucket: sourceBucketName,
|
||||
Key: sourceObjName,
|
||||
Body: secondContent }))
|
||||
Body: secondContent }).promise())
|
||||
);
|
||||
|
||||
afterEach(done => async.parallel([
|
||||
|
|
|
@ -50,7 +50,7 @@ describe('Versioning on a replication source bucket', () => {
|
|||
beforeEach(done => {
|
||||
async.waterfall([
|
||||
cb => s3.createBucket({ Bucket: bucketName }, e => cb(e)),
|
||||
cb => s3.putBucketVersioningPromise({
|
||||
cb => s3.putBucketVersioning({
|
||||
Bucket: bucketName,
|
||||
VersioningConfiguration: {
|
||||
Status: 'Enabled',
|
||||
|
|
|
@ -110,13 +110,15 @@ describeSkipIfAWS('backbeat routes', () => {
|
|||
bucketUtil = new BucketUtility(
|
||||
'default', { signatureVersion: 'v4' });
|
||||
s3 = bucketUtil.s3;
|
||||
s3.createBucketPromise({ Bucket: TEST_BUCKET })
|
||||
.then(() => s3.putBucketVersioningPromise(
|
||||
s3.createBucket({ Bucket: TEST_BUCKET }).promise()
|
||||
.then(() => s3.putBucketVersioning(
|
||||
{
|
||||
Bucket: TEST_BUCKET,
|
||||
VersioningConfiguration: { Status: 'Enabled' },
|
||||
}))
|
||||
.then(() => s3.createBucketPromise({ Bucket: NONVERSIONED_BUCKET }))
|
||||
}).promise())
|
||||
.then(() => s3.createBucket({
|
||||
Bucket: NONVERSIONED_BUCKET,
|
||||
}).promise())
|
||||
.then(() => done())
|
||||
.catch(err => {
|
||||
process.stdout.write(`Error creating bucket: ${err}\n`);
|
||||
|
@ -125,8 +127,9 @@ describeSkipIfAWS('backbeat routes', () => {
|
|||
});
|
||||
after(done => {
|
||||
bucketUtil.empty(TEST_BUCKET)
|
||||
.then(() => s3.deleteBucketPromise({ Bucket: TEST_BUCKET }))
|
||||
.then(() => s3.deleteBucketPromise({ Bucket: NONVERSIONED_BUCKET }))
|
||||
.then(() => s3.deleteBucket({ Bucket: TEST_BUCKET }).promise())
|
||||
.then(() =>
|
||||
s3.deleteBucket({ Bucket: NONVERSIONED_BUCKET }).promise())
|
||||
.then(() => done());
|
||||
});
|
||||
|
||||
|
|
58
yarn.lock
58
yarn.lock
|
@ -282,31 +282,6 @@ arraybuffer.slice@~0.0.7:
|
|||
resolved "https://registry.yarnpkg.com/arraybuffer.slice/-/arraybuffer.slice-0.0.7.tgz#3bbc4275dd584cc1b10809b89d4e8b63a69e7675"
|
||||
integrity sha512-wGUIVQXuehL5TCqQun8OW81jGzAWycqzFF8lFp+GOM5BXLYj3bKNsYC4daB7n6XjCqxQA/qgTJ+8ANR3acjrog==
|
||||
|
||||
"arsenal@github:scality/Arsenal#38f851e":
|
||||
version "7.5.0"
|
||||
resolved "https://codeload.github.com/scality/Arsenal/tar.gz/38f851e30e22cbef457b5180e4006b367c1fb6d1"
|
||||
dependencies:
|
||||
"@hapi/joi" "^15.1.0"
|
||||
JSONStream "^1.0.0"
|
||||
ajv "6.12.2"
|
||||
async "~2.1.5"
|
||||
debug "~2.6.9"
|
||||
diskusage "^1.1.1"
|
||||
ioredis "4.9.5"
|
||||
ipaddr.js "1.9.1"
|
||||
level "~5.0.1"
|
||||
level-sublevel "~6.6.5"
|
||||
node-forge "^0.7.1"
|
||||
simple-glob "^0.2"
|
||||
socket.io "~2.3.0"
|
||||
socket.io-client "~2.3.0"
|
||||
utf8 "2.1.2"
|
||||
uuid "^3.0.1"
|
||||
werelogs scality/werelogs#0ff7ec82
|
||||
xml2js "~0.4.23"
|
||||
optionalDependencies:
|
||||
ioctl "2.0.0"
|
||||
|
||||
"arsenal@github:scality/Arsenal#7358bd1":
|
||||
version "7.5.0"
|
||||
resolved "https://codeload.github.com/scality/Arsenal/tar.gz/7358bd10f887a3dd66c4943d1243eeec0b4501ab"
|
||||
|
@ -469,19 +444,19 @@ asynckit@^0.4.0:
|
|||
resolved "https://registry.yarnpkg.com/asynckit/-/asynckit-0.4.0.tgz#c79ed97f7f34cb8f2ba1bc9790bcc366474b4b79"
|
||||
integrity sha1-x57Zf380y48robyXkLzDZkdLS3k=
|
||||
|
||||
aws-sdk@2.363.0:
|
||||
version "2.363.0"
|
||||
resolved "https://registry.yarnpkg.com/aws-sdk/-/aws-sdk-2.363.0.tgz#6d366a78d5b008fd927d6ff24815d39d78b54778"
|
||||
integrity sha512-kQOfjzCEllH45OFN0z3fvhpSWDFWu19715A7TztHx6IEWKwwIEyd3b2XhTZtQLJrI1Giv7iGALwH46gybH9HJw==
|
||||
aws-sdk@2.831.0:
|
||||
version "2.831.0"
|
||||
resolved "https://registry.yarnpkg.com/aws-sdk/-/aws-sdk-2.831.0.tgz#02607cc911a2136e5aabe624c1282e821830aef2"
|
||||
integrity sha512-lrOjbGFpjk2xpESyUx2PGsTZgptCy5xycZazPeakNbFO19cOoxjHx3xyxOHsMCYb3pQwns35UvChQT60B4u6cw==
|
||||
dependencies:
|
||||
buffer "4.9.1"
|
||||
buffer "4.9.2"
|
||||
events "1.1.1"
|
||||
ieee754 "1.1.8"
|
||||
ieee754 "1.1.13"
|
||||
jmespath "0.15.0"
|
||||
querystring "0.2.0"
|
||||
sax "1.2.1"
|
||||
url "0.10.3"
|
||||
uuid "3.1.0"
|
||||
uuid "3.3.2"
|
||||
xml2js "0.4.19"
|
||||
|
||||
aws-sdk@^2.2.23:
|
||||
|
@ -740,15 +715,6 @@ buffer-from@^1.0.0:
|
|||
resolved "https://registry.yarnpkg.com/buffer-from/-/buffer-from-1.1.1.tgz#32713bc028f75c02fdb710d7c7bcec1f2c6070ef"
|
||||
integrity sha512-MQcXEUbCKtEo7bhqEs6560Hyd4XaovZlO/k9V3hjVUF/zwW7KBVdSK4gIt/bzwS9MbR5qob+F5jusZsb0YQK2A==
|
||||
|
||||
buffer@4.9.1:
|
||||
version "4.9.1"
|
||||
resolved "https://registry.yarnpkg.com/buffer/-/buffer-4.9.1.tgz#6d1bb601b07a4efced97094132093027c95bc298"
|
||||
integrity sha1-bRu2AbB6TvztlwlBMgkwJ8lbwpg=
|
||||
dependencies:
|
||||
base64-js "^1.0.2"
|
||||
ieee754 "^1.1.4"
|
||||
isarray "^1.0.0"
|
||||
|
||||
buffer@4.9.2:
|
||||
version "4.9.2"
|
||||
resolved "https://registry.yarnpkg.com/buffer/-/buffer-4.9.2.tgz#230ead344002988644841ab0244af8c44bbe3ef8"
|
||||
|
@ -1928,11 +1894,6 @@ ieee754@1.1.13:
|
|||
resolved "https://registry.yarnpkg.com/ieee754/-/ieee754-1.1.13.tgz#ec168558e95aa181fd87d37f55c32bbcb6708b84"
|
||||
integrity sha512-4vf7I2LYV/HaWerSo3XmlMkp5eZ83i+/CDluXi/IGTs/O1sejBNhTtnxzmRZfvOUqj7lZjqHkeTvpgSFDlWZTg==
|
||||
|
||||
ieee754@1.1.8:
|
||||
version "1.1.8"
|
||||
resolved "https://registry.yarnpkg.com/ieee754/-/ieee754-1.1.8.tgz#be33d40ac10ef1926701f6f08a2d86fbfd1ad3e4"
|
||||
integrity sha1-vjPUCsEO8ZJnAfbwii2G+/0a0+Q=
|
||||
|
||||
ieee754@^1.1.13, ieee754@^1.1.4:
|
||||
version "1.2.1"
|
||||
resolved "https://registry.yarnpkg.com/ieee754/-/ieee754-1.2.1.tgz#8eb7a10a63fff25d15a57b001586d177d1b0d352"
|
||||
|
@ -4154,11 +4115,6 @@ util-deprecate@^1.0.1, util-deprecate@~1.0.1:
|
|||
resolved "https://registry.yarnpkg.com/util-deprecate/-/util-deprecate-1.0.2.tgz#450d4dc9fa70de732762fbd2d4a28981419a0ccf"
|
||||
integrity sha1-RQ1Nyfpw3nMnYvvS1KKJgUGaDM8=
|
||||
|
||||
uuid@3.1.0:
|
||||
version "3.1.0"
|
||||
resolved "https://registry.yarnpkg.com/uuid/-/uuid-3.1.0.tgz#3dd3d3e790abc24d7b0d3a034ffababe28ebbc04"
|
||||
integrity sha512-DIWtzUkw04M4k3bf1IcpS2tngXEL26YUD2M0tMDUpnUrz2hgzUBlD55a4FjdLGPvfHxS6uluGWvaVEqgBcVa+g==
|
||||
|
||||
uuid@3.3.2:
|
||||
version "3.3.2"
|
||||
resolved "https://registry.yarnpkg.com/uuid/-/uuid-3.3.2.tgz#1b4af4955eb3077c501c23872fc6513811587131"
|
||||
|
|
Loading…
Reference in New Issue