Compare commits

...

2 Commits

Author SHA1 Message Date
Nicolas Humbert a017f2592f fix 2017-10-23 15:03:28 -07:00
Nicolas Humbert ecfc23aeb1 BF: try/catch 2017-10-23 14:45:50 -07:00
1 changed file with 30 additions and 29 deletions

View File

@ -91,7 +91,8 @@ azureMpuUtils.getSubPartIds = (part, uploadId) =>
[...Array(part.numberSubParts).keys()].map(subPartIndex => [...Array(part.numberSubParts).keys()].map(subPartIndex =>
azureMpuUtils.getBlockId(uploadId, part.partNumber, subPartIndex)); azureMpuUtils.getBlockId(uploadId, part.partNumber, subPartIndex));
azureMpuUtils.putSinglePart = (errorWrapperFn, request, params, dataStoreName, azureMpuUtils.putSinglePart = (that, errorWrapper, request, params,
dataStoreName,
log, cb) => { log, cb) => {
const { bucketName, partNumber, size, objectKey, contentMD5, uploadId } const { bucketName, partNumber, size, objectKey, contentMD5, uploadId }
= params; = params;
@ -104,31 +105,31 @@ log, cb) => {
options.transactionalContentMD5 = contentMD5; options.transactionalContentMD5 = contentMD5;
} }
request.pipe(passThrough); request.pipe(passThrough);
return errorWrapperFn('uploadPart', 'createBlockFromStream', return that[errorWrapper]('uploadPart', 'createBlockFromStream',
[blockId, bucketName, objectKey, passThrough, size, options, [blockId, bucketName, objectKey, passThrough, size, options,
(err, result) => { (err, result) => {
if (err) { if (err) {
log.error('Error from Azure data backend uploadPart', log.error('Error from Azure data backend uploadPart',
{ error: err.message, dataStoreName }); { error: err.message, dataStoreName });
if (err.code === 'ContainerNotFound') { if (err.code === 'ContainerNotFound') {
return cb(errors.NoSuchBucket); return cb(errors.NoSuchBucket);
} }
if (err.code === 'InvalidMd5') { if (err.code === 'InvalidMd5') {
return cb(errors.InvalidDigest); return cb(errors.InvalidDigest);
} }
if (err.code === 'Md5Mismatch') { if (err.code === 'Md5Mismatch') {
return cb(errors.BadDigest); return cb(errors.BadDigest);
} }
return cb(errors.InternalError.customizeDescription( return cb(errors.InternalError.customizeDescription(
`Error returned from Azure: ${err.message}`) `Error returned from Azure: ${err.message}`)
); );
} }
const eTag = objectUtils.getHexMD5(result.headers['content-md5']); const eTag = objectUtils.getHexMD5(result.headers['content-md5']);
return cb(null, eTag, totalSubParts, size); return cb(null, eTag, totalSubParts, size);
}], log, cb); }], log, cb);
}; };
azureMpuUtils.putNextSubPart = (errorWrapperFn, partParams, subPartInfo, azureMpuUtils.putNextSubPart = (that, errorWrapper, partParams, subPartInfo,
subPartStream, subPartIndex, resultsCollector, log, cb) => { subPartStream, subPartIndex, resultsCollector, log, cb) => {
const { uploadId, partNumber, bucketName, objectKey } = partParams; const { uploadId, partNumber, bucketName, objectKey } = partParams;
const subPartSize = azureMpuUtils.getSubPartSize( const subPartSize = azureMpuUtils.getSubPartSize(
@ -136,12 +137,12 @@ subPartStream, subPartIndex, resultsCollector, log, cb) => {
const subPartId = azureMpuUtils.getBlockId(uploadId, partNumber, const subPartId = azureMpuUtils.getBlockId(uploadId, partNumber,
subPartIndex); subPartIndex);
resultsCollector.pushOp(); resultsCollector.pushOp();
errorWrapperFn('uploadPart', 'createBlockFromStream', that[errorWrapper]('uploadPart', 'createBlockFromStream',
[subPartId, bucketName, objectKey, subPartStream, subPartSize, [subPartId, bucketName, objectKey, subPartStream, subPartSize,
{}, err => resultsCollector.pushResult(err, subPartIndex)], log, cb); {}, err => resultsCollector.pushResult(err, subPartIndex)], log, cb);
}; };
azureMpuUtils.putSubParts = (errorWrapperFn, request, params, azureMpuUtils.putSubParts = (that, errorWrapper, request, params,
dataStoreName, log, cb) => { dataStoreName, log, cb) => {
const subPartInfo = azureMpuUtils.getSubPartInfo(params.size); const subPartInfo = azureMpuUtils.getSubPartInfo(params.size);
const resultsCollector = new ResultsCollector(); const resultsCollector = new ResultsCollector();
@ -190,7 +191,7 @@ dataStoreName, log, cb) => {
const currentStream = streamInterface.getCurrentStream(); const currentStream = streamInterface.getCurrentStream();
// start first put to Azure before we start streaming the data // start first put to Azure before we start streaming the data
azureMpuUtils.putNextSubPart(errorWrapperFn, params, subPartInfo, azureMpuUtils.putNextSubPart(that, errorWrapper, params, subPartInfo,
currentStream, 0, resultsCollector, log, cb); currentStream, 0, resultsCollector, log, cb);
request.pipe(hashedStream); request.pipe(hashedStream);
@ -211,8 +212,8 @@ dataStoreName, log, cb) => {
} }
const { nextStream, subPartIndex } = const { nextStream, subPartIndex } =
streamInterface.transitionToNextStream(); streamInterface.transitionToNextStream();
azureMpuUtils.putNextSubPart(errorWrapperFn, params, subPartInfo, azureMpuUtils.putNextSubPart(that, errorWrapper, params,
nextStream, subPartIndex, resultsCollector, log, cb); subPartInfo, nextStream, subPartIndex, resultsCollector, log, cb);
streamInterface.write(firstChunk); streamInterface.write(firstChunk);
} else { } else {
streamInterface.write(data); streamInterface.write(data);