Compare commits

...

1 Commit

Author SHA1 Message Date
Dora Korpar cc17fc461d bf: fix azureclient scope handling 2017-10-23 12:41:31 -07:00
1 changed file with 10 additions and 9 deletions
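
The commit threads an extra `that` argument through putSinglePart, putNextSubPart, and putSubParts and passes it as the first argument to every errorWrapperFn call, so the wrapper can run the Azure SDK request in the scope of the calling client instead of relying on an implicit `this`. Below is a minimal sketch of that pattern; the names (AzureClientStub, errorWrapper, _sdk) are hypothetical stand-ins, not Arsenal's actual API.

'use strict';

// Hypothetical client wrapper illustrating why the helpers need `that`:
// the wrapper must reach the client's own SDK handle and state even when
// the function reference is passed around on its own.
class AzureClientStub {
    constructor(dataStoreName) {
        this._dataStoreName = dataStoreName;
        this._sdk = {
            // Stand-in for the real createBlockFromStream SDK call.
            createBlockFromStream: (blockId, bucket, key, stream, size,
                options, cb) => cb(null, { blockId, bucket, key }),
        };
    }

    // Invoked as errorWrapperFn(that, s3Method, azureMethod, args, log, cb):
    // `that` carries the client scope explicitly.
    static errorWrapper(that, s3Method, azureMethod, args, log, cb) {
        try {
            // The last element of `args` is the SDK callback, mirroring the
            // createBlockFromStream calls in the diff below.
            return that._sdk[azureMethod](...args);
        } catch (err) {
            log.error('azure call failed', { s3Method, azureMethod, err });
            return cb(err);
        }
    }
}

// Usage mirroring the patched call sites in putSinglePart.
const that = new AzureClientStub('azurebackend');
AzureClientStub.errorWrapper(that, 'uploadPart', 'createBlockFromStream',
    ['blockid0', 'bucket', 'key', null, 0, {},
        (err, result) => console.log(err || result)],
    console, err => { if (err) { throw err; } });

Passing the client explicitly keeps the MPU helpers usable as plain module functions and avoids having to bind the wrapper at every call site.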


@@ -91,8 +91,8 @@ azureMpuUtils.getSubPartIds = (part, uploadId) =>
     [...Array(part.numberSubParts).keys()].map(subPartIndex =>
         azureMpuUtils.getBlockId(uploadId, part.partNumber, subPartIndex));
 
-azureMpuUtils.putSinglePart = (errorWrapperFn, request, params, dataStoreName,
-log, cb) => {
+azureMpuUtils.putSinglePart = (that, errorWrapperFn, request, params,
+dataStoreName, log, cb) => {
     const { bucketName, partNumber, size, objectKey, contentMD5, uploadId }
         = params;
     const totalSubParts = 1;
@@ -104,7 +104,7 @@ log, cb) => {
         options.transactionalContentMD5 = contentMD5;
     }
     request.pipe(passThrough);
-    return errorWrapperFn('uploadPart', 'createBlockFromStream',
+    return errorWrapperFn(that, 'uploadPart', 'createBlockFromStream',
         [blockId, bucketName, objectKey, passThrough, size, options,
         (err, result) => {
             if (err) {
@@ -128,7 +128,7 @@ log, cb) => {
         }], log, cb);
 };
 
-azureMpuUtils.putNextSubPart = (errorWrapperFn, partParams, subPartInfo,
+azureMpuUtils.putNextSubPart = (that, errorWrapperFn, partParams, subPartInfo,
 subPartStream, subPartIndex, resultsCollector, log, cb) => {
     const { uploadId, partNumber, bucketName, objectKey } = partParams;
     const subPartSize = azureMpuUtils.getSubPartSize(
@@ -136,12 +136,12 @@ subPartStream, subPartIndex, resultsCollector, log, cb) => {
     const subPartId = azureMpuUtils.getBlockId(uploadId, partNumber,
         subPartIndex);
     resultsCollector.pushOp();
-    errorWrapperFn('uploadPart', 'createBlockFromStream',
+    errorWrapperFn(that, 'uploadPart', 'createBlockFromStream',
         [subPartId, bucketName, objectKey, subPartStream, subPartSize,
         {}, err => resultsCollector.pushResult(err, subPartIndex)], log, cb);
 };
 
-azureMpuUtils.putSubParts = (errorWrapperFn, request, params,
+azureMpuUtils.putSubParts = (that, errorWrapperFn, request, params,
 dataStoreName, log, cb) => {
     const subPartInfo = azureMpuUtils.getSubPartInfo(params.size);
     const resultsCollector = new ResultsCollector();
@@ -190,7 +190,7 @@ dataStoreName, log, cb) => {
 
     const currentStream = streamInterface.getCurrentStream();
     // start first put to Azure before we start streaming the data
-    azureMpuUtils.putNextSubPart(errorWrapperFn, params, subPartInfo,
+    azureMpuUtils.putNextSubPart(that, errorWrapperFn, params, subPartInfo,
         currentStream, 0, resultsCollector, log, cb);
 
     request.pipe(hashedStream);
@@ -211,8 +211,9 @@ dataStoreName, log, cb) => {
             }
             const { nextStream, subPartIndex } =
                 streamInterface.transitionToNextStream();
-            azureMpuUtils.putNextSubPart(errorWrapperFn, params, subPartInfo,
-                nextStream, subPartIndex, resultsCollector, log, cb);
+            azureMpuUtils.putNextSubPart(that, errorWrapperFn, params,
+                subPartInfo, nextStream, subPartIndex, resultsCollector,
+                log, cb);
             streamInterface.write(firstChunk);
         } else {
             streamInterface.write(data);