Compare commits

...

2 Commits

Author SHA1 Message Date
Nicolas Humbert a017f2592f fix 2017-10-23 15:03:28 -07:00
Nicolas Humbert ecfc23aeb1 BF: try/catch 2017-10-23 14:45:50 -07:00
1 changed file with 30 additions and 29 deletions

View File

@ -91,7 +91,8 @@ azureMpuUtils.getSubPartIds = (part, uploadId) =>
[...Array(part.numberSubParts).keys()].map(subPartIndex =>
azureMpuUtils.getBlockId(uploadId, part.partNumber, subPartIndex));
azureMpuUtils.putSinglePart = (errorWrapperFn, request, params, dataStoreName,
azureMpuUtils.putSinglePart = (that, errorWrapper, request, params,
dataStoreName,
log, cb) => {
const { bucketName, partNumber, size, objectKey, contentMD5, uploadId }
= params;
@ -104,7 +105,7 @@ log, cb) => {
options.transactionalContentMD5 = contentMD5;
}
request.pipe(passThrough);
return errorWrapperFn('uploadPart', 'createBlockFromStream',
return that[errorWrapper]('uploadPart', 'createBlockFromStream',
[blockId, bucketName, objectKey, passThrough, size, options,
(err, result) => {
if (err) {
@ -128,7 +129,7 @@ log, cb) => {
}], log, cb);
};
azureMpuUtils.putNextSubPart = (errorWrapperFn, partParams, subPartInfo,
azureMpuUtils.putNextSubPart = (that, errorWrapper, partParams, subPartInfo,
subPartStream, subPartIndex, resultsCollector, log, cb) => {
const { uploadId, partNumber, bucketName, objectKey } = partParams;
const subPartSize = azureMpuUtils.getSubPartSize(
@ -136,12 +137,12 @@ subPartStream, subPartIndex, resultsCollector, log, cb) => {
const subPartId = azureMpuUtils.getBlockId(uploadId, partNumber,
subPartIndex);
resultsCollector.pushOp();
errorWrapperFn('uploadPart', 'createBlockFromStream',
that[errorWrapper]('uploadPart', 'createBlockFromStream',
[subPartId, bucketName, objectKey, subPartStream, subPartSize,
{}, err => resultsCollector.pushResult(err, subPartIndex)], log, cb);
};
azureMpuUtils.putSubParts = (errorWrapperFn, request, params,
azureMpuUtils.putSubParts = (that, errorWrapper, request, params,
dataStoreName, log, cb) => {
const subPartInfo = azureMpuUtils.getSubPartInfo(params.size);
const resultsCollector = new ResultsCollector();
@ -190,7 +191,7 @@ dataStoreName, log, cb) => {
const currentStream = streamInterface.getCurrentStream();
// start first put to Azure before we start streaming the data
azureMpuUtils.putNextSubPart(errorWrapperFn, params, subPartInfo,
azureMpuUtils.putNextSubPart(that, errorWrapper, params, subPartInfo,
currentStream, 0, resultsCollector, log, cb);
request.pipe(hashedStream);
@ -211,8 +212,8 @@ dataStoreName, log, cb) => {
}
const { nextStream, subPartIndex } =
streamInterface.transitionToNextStream();
azureMpuUtils.putNextSubPart(errorWrapperFn, params, subPartInfo,
nextStream, subPartIndex, resultsCollector, log, cb);
azureMpuUtils.putNextSubPart(that, errorWrapper, params,
subPartInfo, nextStream, subPartIndex, resultsCollector, log, cb);
streamInterface.write(firstChunk);
} else {
streamInterface.write(data);