Compare commits

...

1 Commit

Author SHA1 Message Date
alexandre merle f98cc088a5 S3C-3949: Abort Copy object when request aborted
Aborting the remaining object copies if the client
request is aborted, which will prevent copyObject calls from stacking on
each other and performing a DoS of sproxyd connections in case of retry.
2021-02-22 16:52:30 +01:00
2 changed files with 24 additions and 2 deletions

View File

@ -506,6 +506,12 @@ const data = {
}); });
} }
let aborted = false;
request.once('aborted', () => {
aborted = true;
});
// dataLocator is an array. need to get and put all parts // dataLocator is an array. need to get and put all parts
// For now, copy 1 part at a time. Could increase the second // For now, copy 1 part at a time. Could increase the second
// argument here to increase the number of parts // argument here to increase the number of parts
@ -513,6 +519,10 @@ const data = {
return async.mapLimit(dataLocator, 1, return async.mapLimit(dataLocator, 1,
// eslint-disable-next-line prefer-arrow-callback // eslint-disable-next-line prefer-arrow-callback
function copyPart(part, copyCb) { function copyPart(part, copyCb) {
if (aborted === true) {
return copyCb(errors.InternalError.customizeDescription(
'aborted'));
}
if (part.dataStoreType === 'azure') { if (part.dataStoreType === 'azure') {
const passThrough = new PassThrough(); const passThrough = new PassThrough();
return async.parallel([ return async.parallel([
@ -538,7 +548,7 @@ const data = {
}); });
}, (err, results) => { }, (err, results) => {
if (err) { if (err) {
log.debug('error transferring data from source', log.error('error transferring data from source',
{ error: err }); { error: err });
return cb(err); return cb(err);
} }
@ -749,11 +759,22 @@ const data = {
// to collect the md5 from multiple streams // to collect the md5 from multiple streams
let totalHash; let totalHash;
const locations = []; const locations = [];
let aborted = false;
request.once('aborted', () => {
aborted = true;
});
// dataLocator is an array. need to get and put all parts // dataLocator is an array. need to get and put all parts
// in order so can get the ETag of full object // in order so can get the ETag of full object
return async.forEachOfSeries(dataLocator, return async.forEachOfSeries(dataLocator,
// eslint-disable-next-line prefer-arrow-callback // eslint-disable-next-line prefer-arrow-callback
function copyPart(part, index, cb) { function copyPart(part, index, cb) {
if (aborted === true) {
return cb(errors.InternalError.customizeDescription(
'aborted'));
}
if (part.dataStoreType === 'azure') { if (part.dataStoreType === 'azure') {
const passThrough = new PassThrough(); const passThrough = new PassThrough();
return async.parallel([ return async.parallel([

View File

@ -914,7 +914,8 @@ describe('Multipart Upload API with AWS Backend', function mpuTestSuite() {
(uploadId, copyObjectKey, next) => { (uploadId, copyObjectKey, next) => {
const copyParams = const copyParams =
getPartParams(objectKey, uploadId, 3); getPartParams(objectKey, uploadId, 3);
objectPutCopyPart(authInfo, copyParams, bucketName, const req = new DummyRequest(copyParams, undefined);
objectPutCopyPart(authInfo, req, bucketName,
copyObjectKey, undefined, log, err => { copyObjectKey, undefined, log, err => {
next(err, uploadId); next(err, uploadId);
}); });