Compare commits

...

1 Commit

Author SHA1 Message Date
alexandre merle f98cc088a5 S3C-3949: Abort Copy object when request aborted
Abort the remaining object copies if the client
request is aborted, which prevents copyObject calls from stacking on
each other and performing a DoS of sproxyd connections in case of retry.
2021-02-22 16:52:30 +01:00
2 changed files with 24 additions and 2 deletions
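The change itself is small: register a listener on the incoming request's 'aborted' event and check a flag before copying each part, so a dropped client connection stops the per-part loop instead of letting it keep hitting the backend. Below is a minimal sketch of that pattern outside cloudserver, assuming only the async package; copyAllParts and copyOnePart are placeholder names and a plain Error stands in for the errors.InternalError used in the real code.

const async = require('async');
const { EventEmitter } = require('events');

// Placeholder for the real per-part copy; here it just succeeds asynchronously.
function copyOnePart(part, done) {
    setImmediate(() => done(null, { key: part.key }));
}

function copyAllParts(request, dataLocator, cb) {
    // Once the client goes away, stop issuing further part copies so the
    // backend (sproxyd in the commit message) is not kept busy with work
    // nobody is waiting for anymore.
    let aborted = false;
    request.once('aborted', () => {
        aborted = true;
    });
    // Copy one part at a time, mirroring the mapLimit(dataLocator, 1, ...) call.
    return async.mapLimit(dataLocator, 1, (part, copyCb) => {
        if (aborted) {
            // The real code returns errors.InternalError.customizeDescription('aborted').
            return copyCb(new Error('aborted'));
        }
        return copyOnePart(part, copyCb);
    }, cb);
}

// Usage: any EventEmitter that can emit 'aborted' works as the request here.
const req = new EventEmitter();
copyAllParts(req, [{ key: 'part1' }, { key: 'part2' }], (err, keys) => {
    console.log(err || keys);
});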

View File

@@ -506,6 +506,12 @@ const data = {
             });
         }
+        let aborted = false;
+        request.once('aborted', () => {
+            aborted = true;
+        });
         // dataLocator is an array. need to get and put all parts
         // For now, copy 1 part at a time. Could increase the second
         // argument here to increase the number of parts
@@ -513,6 +519,10 @@ const data = {
         return async.mapLimit(dataLocator, 1,
             // eslint-disable-next-line prefer-arrow-callback
             function copyPart(part, copyCb) {
+                if (aborted === true) {
+                    return copyCb(errors.InternalError.customizeDescription(
+                        'aborted'));
+                }
                 if (part.dataStoreType === 'azure') {
                     const passThrough = new PassThrough();
                     return async.parallel([
@@ -538,7 +548,7 @@ const data = {
                 });
             }, (err, results) => {
                 if (err) {
-                    log.debug('error transferring data from source',
+                    log.error('error transferring data from source',
                         { error: err });
                     return cb(err);
                 }
@@ -749,11 +759,22 @@ const data = {
         // to collect the md5 from multiple streams
         let totalHash;
         const locations = [];
+        let aborted = false;
+        request.once('aborted', () => {
+            aborted = true;
+        });
         // dataLocator is an array. need to get and put all parts
         // in order so can get the ETag of full object
         return async.forEachOfSeries(dataLocator,
             // eslint-disable-next-line prefer-arrow-callback
             function copyPart(part, index, cb) {
+                if (aborted === true) {
+                    return cb(errors.InternalError.customizeDescription(
+                        'aborted'));
+                }
                 if (part.dataStoreType === 'azure') {
                     const passThrough = new PassThrough();
                     return async.parallel([
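Both hunks in this file rely on the same mechanism: Node's http.IncomingMessage emits 'aborted' when the client terminates the request before the response is complete (newer Node versions also expose req.destroyed and the 'close' event). The self-contained server below is only an illustration, not cloudserver code; it shows when the event fires and how a flag lets an in-flight loop stop early, with a timer standing in for the per-part copy work.

const http = require('http');

http.createServer((req, res) => {
    let aborted = false;
    req.once('aborted', () => {
        aborted = true;
        console.log('client went away, skipping the remaining parts');
    });

    // Stand-in for a multi-part copy: one "part" per second, five parts total.
    let part = 0;
    const timer = setInterval(() => {
        if (aborted || part === 5) {
            clearInterval(timer);
            if (!aborted) {
                res.end('copied 5 parts\n');
            }
            return;
        }
        part += 1;
        console.log(`copied part ${part}`);
    }, 1000);
}).listen(8000);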

View File

@@ -914,7 +914,8 @@ describe('Multipart Upload API with AWS Backend', function mpuTestSuite() {
             (uploadId, copyObjectKey, next) => {
                 const copyParams =
                     getPartParams(objectKey, uploadId, 3);
-                objectPutCopyPart(authInfo, copyParams, bucketName,
+                const req = new DummyRequest(copyParams, undefined);
+                objectPutCopyPart(authInfo, req, bucketName,
                     copyObjectKey, undefined, log, err => {
                         next(err, uploadId);
                     });
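The test tweak follows from the code change above: the copy path now calls request.once('aborted', ...), so the raw params object is wrapped in the suite's DummyRequest helper to provide something event-capable. The sketch below is a hypothetical request double for illustration, not the suite's actual DummyRequest implementation: it carries the request params and lets a test trigger the abort path on demand.

const { EventEmitter } = require('events');

// Hypothetical request double (not the real DummyRequest): carries the usual
// request params and can emit 'aborted' the way a socket-backed request would.
class FakeRequest extends EventEmitter {
    constructor(params) {
        super();
        Object.assign(this, params);
    }

    abort() {
        this.emit('aborted');
    }
}

// A test could exercise the new guard by aborting mid-copy:
const req = new FakeRequest({ bucketName: 'bucketname', objectKey: 'objectName' });
req.once('aborted', () => console.log('remaining part copies would be skipped'));
req.abort();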