Compare commits

...

1 Commit

Author SHA1 Message Date
Dora Korpar cc17fc461d bf: fix azureclient scope handling 2017-10-23 12:41:31 -07:00
1 changed file with 10 additions and 9 deletions

View File

@@ -91,8 +91,8 @@ azureMpuUtils.getSubPartIds = (part, uploadId) =>
[...Array(part.numberSubParts).keys()].map(subPartIndex =>
azureMpuUtils.getBlockId(uploadId, part.partNumber, subPartIndex));
azureMpuUtils.putSinglePart = (errorWrapperFn, request, params, dataStoreName,
log, cb) => {
azureMpuUtils.putSinglePart = (that, errorWrapperFn, request, params,
dataStoreName, log, cb) => {
const { bucketName, partNumber, size, objectKey, contentMD5, uploadId }
= params;
const totalSubParts = 1;
@@ -104,7 +104,7 @@ log, cb) => {
options.transactionalContentMD5 = contentMD5;
}
request.pipe(passThrough);
return errorWrapperFn('uploadPart', 'createBlockFromStream',
return errorWrapperFn(that, 'uploadPart', 'createBlockFromStream',
[blockId, bucketName, objectKey, passThrough, size, options,
(err, result) => {
if (err) {
@@ -128,7 +128,7 @@ log, cb) => {
}], log, cb);
};
azureMpuUtils.putNextSubPart = (errorWrapperFn, partParams, subPartInfo,
azureMpuUtils.putNextSubPart = (that, errorWrapperFn, partParams, subPartInfo,
subPartStream, subPartIndex, resultsCollector, log, cb) => {
const { uploadId, partNumber, bucketName, objectKey } = partParams;
const subPartSize = azureMpuUtils.getSubPartSize(
@@ -136,12 +136,12 @@ subPartStream, subPartIndex, resultsCollector, log, cb) => {
const subPartId = azureMpuUtils.getBlockId(uploadId, partNumber,
subPartIndex);
resultsCollector.pushOp();
errorWrapperFn('uploadPart', 'createBlockFromStream',
errorWrapperFn(that, 'uploadPart', 'createBlockFromStream',
[subPartId, bucketName, objectKey, subPartStream, subPartSize,
{}, err => resultsCollector.pushResult(err, subPartIndex)], log, cb);
};
azureMpuUtils.putSubParts = (errorWrapperFn, request, params,
azureMpuUtils.putSubParts = (that, errorWrapperFn, request, params,
dataStoreName, log, cb) => {
const subPartInfo = azureMpuUtils.getSubPartInfo(params.size);
const resultsCollector = new ResultsCollector();
@@ -190,7 +190,7 @@ dataStoreName, log, cb) => {
const currentStream = streamInterface.getCurrentStream();
// start first put to Azure before we start streaming the data
azureMpuUtils.putNextSubPart(errorWrapperFn, params, subPartInfo,
azureMpuUtils.putNextSubPart(that, errorWrapperFn, params, subPartInfo,
currentStream, 0, resultsCollector, log, cb);
request.pipe(hashedStream);
@@ -211,8 +211,9 @@ dataStoreName, log, cb) => {
}
const { nextStream, subPartIndex } =
streamInterface.transitionToNextStream();
azureMpuUtils.putNextSubPart(errorWrapperFn, params, subPartInfo,
nextStream, subPartIndex, resultsCollector, log, cb);
azureMpuUtils.putNextSubPart(that, errorWrapperFn, params,
subPartInfo, nextStream, subPartIndex, resultsCollector,
log, cb);
streamInterface.write(firstChunk);
} else {
streamInterface.write(data);