Compare commits

No commits in common. "a017f2592faf16b9ce4da70d9be98bda8566f356" and "4601794d49216a4bfb0fe2474fc58c301a2f0c74" have entirely different histories.

1 changed file with 29 additions and 30 deletions
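The change below drops the pair of parameters that (an Azure client object) and errorWrapper (the name of a method looked up on it) from the azureMpuUtils helpers and replaces them with a single injected function, errorWrapperFn, so the helpers call the wrapper directly instead of via that[errorWrapper](...). As a minimal caller-side sketch, assuming the wrapper previously lived as a method named errorWrapper on some azureClient object (both names are stand-ins, not taken from this compare):

    // hypothetical adapter: bind the former that[errorWrapper] method once
    // and hand the resulting function to the helpers (names are assumed)
    const errorWrapperFn = azureClient.errorWrapper.bind(azureClient);

    azureMpuUtils.putSinglePart(errorWrapperFn, request, params, dataStoreName,
        log, cb);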

@@ -91,8 +91,7 @@ azureMpuUtils.getSubPartIds = (part, uploadId) =>
     [...Array(part.numberSubParts).keys()].map(subPartIndex =>
         azureMpuUtils.getBlockId(uploadId, part.partNumber, subPartIndex));
 
-azureMpuUtils.putSinglePart = (that, errorWrapper, request, params,
-dataStoreName,
+azureMpuUtils.putSinglePart = (errorWrapperFn, request, params, dataStoreName,
 log, cb) => {
     const { bucketName, partNumber, size, objectKey, contentMD5, uploadId }
         = params;
@@ -105,7 +104,7 @@ log, cb) => {
         options.transactionalContentMD5 = contentMD5;
     }
     request.pipe(passThrough);
-    return that[errorWrapper]('uploadPart', 'createBlockFromStream',
+    return errorWrapperFn('uploadPart', 'createBlockFromStream',
         [blockId, bucketName, objectKey, passThrough, size, options,
         (err, result) => {
             if (err) {
@@ -129,7 +128,7 @@ log, cb) => {
     }], log, cb);
 };
 
-azureMpuUtils.putNextSubPart = (that, errorWrapper, partParams, subPartInfo,
+azureMpuUtils.putNextSubPart = (errorWrapperFn, partParams, subPartInfo,
 subPartStream, subPartIndex, resultsCollector, log, cb) => {
     const { uploadId, partNumber, bucketName, objectKey } = partParams;
     const subPartSize = azureMpuUtils.getSubPartSize(
@@ -137,12 +136,12 @@ subPartStream, subPartIndex, resultsCollector, log, cb) => {
     const subPartId = azureMpuUtils.getBlockId(uploadId, partNumber,
         subPartIndex);
     resultsCollector.pushOp();
-    that[errorWrapper]('uploadPart', 'createBlockFromStream',
+    errorWrapperFn('uploadPart', 'createBlockFromStream',
         [subPartId, bucketName, objectKey, subPartStream, subPartSize,
         {}, err => resultsCollector.pushResult(err, subPartIndex)], log, cb);
 };
 
-azureMpuUtils.putSubParts = (that, errorWrapper, request, params,
+azureMpuUtils.putSubParts = (errorWrapperFn, request, params,
 dataStoreName, log, cb) => {
     const subPartInfo = azureMpuUtils.getSubPartInfo(params.size);
     const resultsCollector = new ResultsCollector();
@@ -191,7 +190,7 @@ dataStoreName, log, cb) => {
     const currentStream = streamInterface.getCurrentStream();
     // start first put to Azure before we start streaming the data
-    azureMpuUtils.putNextSubPart(that, errorWrapper, params, subPartInfo,
+    azureMpuUtils.putNextSubPart(errorWrapperFn, params, subPartInfo,
         currentStream, 0, resultsCollector, log, cb);
 
     request.pipe(hashedStream);
@@ -212,8 +211,8 @@ dataStoreName, log, cb) => {
             }
             const { nextStream, subPartIndex } =
                 streamInterface.transitionToNextStream();
-            azureMpuUtils.putNextSubPart(that, errorWrapper, params,
-                subPartInfo, nextStream, subPartIndex, resultsCollector, log, cb);
+            azureMpuUtils.putNextSubPart(errorWrapperFn, params, subPartInfo,
+                nextStream, subPartIndex, resultsCollector, log, cb);
             streamInterface.write(firstChunk);
         } else {
             streamInterface.write(data);
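From the call sites in these hunks, the injected errorWrapperFn is expected to take an S3 method name, an Azure SDK method name, the argument array for that SDK call (whose last element is the per-call completion callback), the request logger, and the outer callback. A rough sketch of that shape, for illustration only; the azureClient object and the error handling shown here are assumptions, not part of this change:

    // sketch of the contract implied by the call sites above (assumed body)
    function errorWrapperFn(s3Method, azureMethod, args, log, cb) {
        try {
            // args already carries the SDK completion callback as its last element
            return azureClient[azureMethod](...args);
        } catch (err) {
            log.error('azure SDK call failed', { s3Method, azureMethod,
                error: err.message });
            return cb(err);
        }
    }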