Compare commits

...

1 Commits

Author SHA1 Message Date
Nicolas Humbert f78b2eb8a5 Storage class name + init mpu 2022-05-25 17:11:34 -04:00
10 changed files with 187 additions and 80 deletions

View File

@ -1,5 +1,5 @@
const async = require('async');
const { errors, s3middleware } = require('arsenal');
const { errors, s3middleware, models } = require('arsenal');
const getMetaHeaders = s3middleware.userMetadata.getMetaHeaders;
const constants = require('../../../../constants');
@ -16,6 +16,8 @@ const validateWebsiteHeader = require('./websiteServing')
.validateWebsiteHeader;
const applyZenkoUserMD = require('./applyZenkoUserMD');
const { externalBackends, versioningNotImplBackends } = constants;
const { BackendInfo } = models;
const { isValidLocationConstraint } = BackendInfo;
const externalVersioningErrorMessage = 'We do not currently support putting ' +
'a versioned object to a location-constraint of type Azure or GCP.';
@ -61,7 +63,7 @@ function createAndStoreObject(bucketName, bucketMD, objectKey, objMD, authInfo,
canonicalID, cipherBundle, request, isDeleteMarker, streamingV4Params,
log, callback) {
const putVersionId = request.headers['x-scal-s3-version-id'];
const isPutVersion = putVersionId || putVersionId === '';
const isPutVersion = !!putVersionId || putVersionId === '';
const size = isDeleteMarker ? 0 : request.parsedContentLength;
// although the request method may actually be 'DELETE' if creating a
@ -160,18 +162,36 @@ function createAndStoreObject(bucketName, bucketMD, objectKey, objMD, authInfo,
metadataStoreParams.originOp = 's3:ObjectRemoved:DeleteMarkerCreated';
}
const backendInfoObj =
locationConstraintCheck(request, null, bucketMD, log);
if (backendInfoObj.err) {
return process.nextTick(() => {
callback(backendInfoObj.err);
});
let location;
let locationType;
let backendInfo;
if (isPutVersion) {
location = objMD.archive.archiveInfo.hotLocation
const valid = isValidLocationConstraint(config, location, log);
if (!valid) {
return process.nextTick(() =>
callback(errors.InvalidArgument.customizeDescription('Object Location Error')));
}
backendInfo = new BackendInfo(config, location);
locationType = config.getLocationConstraintType(location);
} else {
const backendInfoObj =
locationConstraintCheck(request, null, bucketMD, log);
if (backendInfoObj.err) {
return process.nextTick(() => {
callback(backendInfoObj.err);
});
}
backendInfo = backendInfoObj.backendInfo;
location = backendInfo.getControllingLocationConstraint();
locationType = backendInfoObj.defaultedToDataBackend ? location :
config.getLocationConstraintType(location);
}
const backendInfo = backendInfoObj.backendInfo;
const location = backendInfo.getControllingLocationConstraint();
const locationType = backendInfoObj.defaultedToDataBackend ? location :
config.getLocationConstraintType(location);
metadataStoreParams.dataStoreName = location;
if (versioningNotImplBackends[locationType]) {

View File

@ -71,32 +71,10 @@ const REPLICATION_ACTION = 'MPU';
*/
function completeMultipartUpload(authInfo, request, log, callback) {
log.debug('processing request', { method: 'completeMultipartUpload' });
const bucketName = request.bucketName;
const objectKey = request.objectKey;
const hostname = request.parsedHost;
const uploadId = request.query.uploadId;
const metadataValParams = {
authInfo,
bucketName,
objectKey,
uploadId,
// Note: permissions for completing a multipart upload are the
// same as putting a part.
requestType: 'putPart or complete',
log,
request,
};
const xmlParams = {
bucketName,
objectKey,
hostname,
};
let oldByteLength = null;
const responseHeaders = {};
let versionId;
const putVersionId = request.headers['x-scal-s3-version-id'];
const isPutVersion = putVersionId || putVersionId === '';
const isPutVersion = !!putVersionId || putVersionId === '';
if (putVersionId) {
const decodedVidResult = decodeVID(putVersionId);
if (decodedVidResult instanceof Error) {
@ -109,6 +87,31 @@ function completeMultipartUpload(authInfo, request, log, callback) {
versionId = decodedVidResult;
}
const bucketName = request.bucketName;
const objectKey = request.objectKey;
const hostname = request.parsedHost;
const uploadId = request.query.uploadId;
const metadataValMPUParams = {
authInfo,
bucketName,
objectKey,
uploadId,
// Note: permissions for completing a multipart upload are the
// same as putting a part.
requestType: 'putPart or complete',
log,
request,
isPutVersion,
versionId,
};
const xmlParams = {
bucketName,
objectKey,
hostname,
};
let oldByteLength = null;
const responseHeaders = {};
const queryContainsVersionId = checkQueryVersionId(request.query);
if (queryContainsVersionId instanceof Error) {
return callback(queryContainsVersionId);
@ -134,7 +137,6 @@ function completeMultipartUpload(authInfo, request, log, callback) {
// Required permissions for this action
// at the destinationBucket level are same as objectPut
requestType: 'objectPut',
versionId,
};
metadataValidateBucketAndObj(metadataValParams, log, next);
},
@ -142,30 +144,7 @@ function completeMultipartUpload(authInfo, request, log, callback) {
if (objMD) {
oldByteLength = objMD['content-length'];
}
if (isPutVersion) {
if (!objMD) {
const err = putVersionId ? errors.NoSuchVersion : errors.NoSuchKey;
log.error('error no object metadata found', { method: 'completeMultipartUpload', putVersionId });
return callback(err);
}
if (objMD.isDeleteMarker) {
log.error('version is a delete marker', { method: 'completeMultipartUpload', putVersionId });
return callback(errors.MethodNotAllowed);
}
// make sure object archive restoration is in progress
// NOTE: we do not use putObjectVersion to update the restoration period.
if (!objMD.archive || !objMD.archive.restoreRequestedAt || !objMD.archive.restoreRequestedDays
|| objMD.archive.restoreCompletedAt || objMD.archive.restoreWillExpireAt) {
log.error('object archive restoration is not in progress',
{ method: 'completeMultipartUpload', putVersionId });
return callback(errors.InvalidObjectState);
}
}
return services.metadataValidateMultipart(metadataValParams,
return services.metadataValidateMultipart(metadataValMPUParams,
(err, mpuBucket, mpuOverview, storedMetadata) => {
if (err) {
return next(err, destBucket);

View File

@ -21,6 +21,7 @@ const { validateHeaders, compareObjectLockInformation } =
require('./apiUtils/object/objectLockHelpers');
const { getObjectSSEConfiguration } = require('./apiUtils/bucket/bucketEncryption');
const { setExpirationHeaders } = require('./apiUtils/object/expirationHeaders');
const { decodeVID } = require('./apiUtils/object/versioning');
/*
Sample xml response:
@ -47,6 +48,22 @@ Sample xml response:
*/
function initiateMultipartUpload(authInfo, request, log, callback) {
log.debug('processing request', { method: 'initiateMultipartUpload' });
let versionId;
const putVersionId = request.headers['x-scal-s3-version-id'];
const isPutVersion = !!putVersionId || putVersionId === '';
if (putVersionId) {
const decodedVidResult = decodeVID(putVersionId);
if (decodedVidResult instanceof Error) {
log.trace('invalid x-scal-s3-version-id header', {
versionId: putVersionId,
error: decodedVidResult,
});
return process.nextTick(() => callback(decodedVidResult));
}
versionId = decodedVidResult;
}
const bucketName = request.bucketName;
const objectKey = request.objectKey;
@ -107,6 +124,7 @@ function initiateMultipartUpload(authInfo, request, log, callback) {
// Required permissions for this action are same as objectPut
requestType: 'objectPut',
request,
versionId,
};
const accountCanonicalID = authInfo.getCanonicalID();
let initiatorID = accountCanonicalID;
@ -134,6 +152,7 @@ function initiateMultipartUpload(authInfo, request, log, callback) {
// Otherwise, it is the same as the ownerDisplayName.
initiatorDisplayName,
splitter: constants.splitter,
versionId,
};
const tagging = request.headers['x-amz-tagging'];
if (tagging) {
@ -275,7 +294,31 @@ function initiateMultipartUpload(authInfo, request, log, callback) {
async.waterfall([
next => metadataValidateBucketAndObj(metadataValParams, log,
(error, destinationBucket) => {
(error, destinationBucket, objMD) => {
if (isPutVersion) {
if (!objMD) {
const err = putVersionId ? errors.NoSuchVersion : errors.NoSuchKey;
log.error('error no object metadata found', { method: 'initiateMultipartUpload', putVersionId });
return callback(err);
}
if (objMD.isDeleteMarker) {
log.error('version is a delete marker', { method: 'initiateMultipartUpload', putVersionId });
return callback(errors.MethodNotAllowed);
}
// TODO check object is in cold storage location
// make sure object archive restoration is in progress
// NOTE: we do not use putObjectVersion to update the restoration period.
if (!objMD.archive || !objMD.archive.restoreRequestedAt || !objMD.archive.restoreRequestedDays
|| objMD.archive.restoreCompletedAt || objMD.archive.restoreWillExpireAt) {
log.error('object archive restoration is not in progress',
{ method: 'initiateMultipartUpload', putVersionId });
return callback(errors.InvalidObjectState);
}
}
const corsHeaders = collectCorsHeaders(request.headers.origin, request.method, destinationBucket);
if (error) {
log.debug('error processing request', {

View File

@ -43,7 +43,7 @@ function objectPut(authInfo, request, streamingV4Params, log, callback) {
log.debug('processing request', { method: 'objectPut' });
const putVersionId = request.headers['x-scal-s3-version-id'];
const isPutVersion = putVersionId || putVersionId === '';
const isPutVersion = !!putVersionId || putVersionId === '';
let versionId;
@ -139,8 +139,12 @@ function objectPut(authInfo, request, streamingV4Params, log, callback) {
// make sure object archive restoration is in progress
// NOTE: we do not use putObjectVersion to update the restoration period.
if (!objMD.archive || !objMD.archive.restoreRequestedAt || !objMD.archive.restoreRequestedDays
|| objMD.archive.restoreCompletedAt || objMD.archive.restoreWillExpireAt) {
const isRestoreReady = !objMD.archive || !objMD.archive.archiveInfo
|| !objMD.archive.archiveInfo.hotLocation
|| !objMD.archive.restoreRequestedAt || !objMD.archive.restoreRequestedDays
|| objMD.archive.restoreCompletedAt || objMD.archive.restoreWillExpireAt;
if (isRestoreReady) {
log.error('object archive restoration is not in progress', { method: 'objectPut', putVersionId });
return callback(errors.InvalidObjectState);
}

View File

@ -22,6 +22,7 @@ const { BackendInfo } = models;
const writeContinue = require('../utilities/writeContinue');
const { getObjectSSEConfiguration } = require('./apiUtils/bucket/bucketEncryption');
const skipError = new Error('skip');
const { decodeVID } = require('./apiUtils/object/versioning');
// We pad the partNumbers so that the parts will be sorted in numerical order.
function _getPaddedPartNumber(number) {
@ -57,6 +58,22 @@ function _getPartKey(uploadId, splitter, paddedPartNumber) {
function objectPutPart(authInfo, request, streamingV4Params, log,
cb) {
log.debug('processing request', { method: 'objectPutPart' });
let versionId;
const putVersionId = request.headers['x-scal-s3-version-id'];
const isPutVersion = !!putVersionId || putVersionId === '';
if (putVersionId) {
const decodedVidResult = decodeVID(putVersionId);
if (decodedVidResult instanceof Error) {
log.trace('invalid x-scal-s3-version-id header', {
versionId: putVersionId,
error: decodedVidResult,
});
return process.nextTick(() => callback(decodedVidResult));
}
versionId = decodedVidResult;
}
const size = request.parsedContentLength;
if (Number.parseInt(size, 10) > constants.maximumAllowedPartSize) {
@ -179,6 +196,28 @@ function objectPutPart(authInfo, request, streamingV4Params, log,
});
return next(err, destinationBucket);
}
console.log('res.versionId!!!!', res.versionId);
console.log('versionId!!!', versionId);
if (isPutVersion && res.versionId !== versionId) {
log.error('x-scal-s3-version-id header is incorrect', {
initiateMPUVersionId: res.versionId,
putPartVersionId: versionId,
method: 'objectPutPart::metadata.getObjectMD',
});
// TODO: probably not the correct error.
return next(errors.NoSuchUpload, destinationBucket);
}
if (!isPutVersion && res.versionId) {
log.error('missing the x-scal-s3-version-id header', {
initiateMPUVersionId: res.versionId,
method: 'objectPutPart::metadata.getObjectMD',
});
return next(errors.NoSuchUpload, destinationBucket);
}
const initiatorID = res.initiator.ID;
const requesterID = authInfo.isRequesterAnIAMUser() ?
authInfo.getArn() : authInfo.getCanonicalID();

View File

@ -416,6 +416,8 @@ const services = {
}
multipartObjectMD.controllingLocationConstraint =
params.controllingLocationConstraint;
multipartObjectMD.versionId =
params.versionId;
multipartObjectMD.dataStoreName = params.dataStoreName;
if (params.tagging) {
const validationTagRes = parseTagFromQuery(params.tagging);
@ -459,6 +461,7 @@ const services = {
return cb(err);
}
multipartObjectMD.acl = parsedACL;
console.log('multipartObjectMD!!!', multipartObjectMD);
metadata.putObjectMD(bucketName, longMPUIdentifier,
multipartObjectMD, {}, log, err => {
if (err) {
@ -562,7 +565,7 @@ const services = {
*/
metadataValidateMultipart(params, cb) {
const { bucketName, uploadId, authInfo,
objectKey, requestType, log } = params;
objectKey, requestType, isPutVersion, versionId, log } = params;
assert.strictEqual(typeof bucketName, 'string');
// This checks whether the mpu bucket exists.
@ -598,6 +601,20 @@ const services = {
return cb(err);
}
if (isPutVersion && storedMetadata.versionId !== versionId) {
log.error('x-scal-s3-version-id header is incorrect', {
initiateMPUVersionId: storedMetadata.versionId,
putPartVersionId: versionId,
method: 'objectPutPart::metadata.getObjectMD',
});
// TODO: probably not the correct error.
return cb(errors.NoSuchUpload);
}
// No need to check if (!isPutVersion && storedMetadata.versionId) since
// the user should be able to list, abort or complete MPU without the need
// of specifying the x-scal-s3-version-id header.
const initiatorID = storedMetadata.initiator.ID;
const ownerID = storedMetadata['owner-id'];
const mpuOverview = {

View File

@ -71,7 +71,7 @@
},
"scripts": {
"cloudserver": "S3METADATA=mongodb npm-run-all --parallel start_dataserver start_s3server",
"ft_awssdk": "cd tests/functional/aws-node-sdk && mocha --reporter mocha-multi-reporters --reporter-options configFile=$INIT_CWD/tests/reporter-config.json test/",
"ft_awssdk": "cd tests/functional/aws-node-sdk && mocha --reporter mocha-multi-reporters --reporter-options configFile=$INIT_CWD/tests/reporter-config.json test/object/mpuVersion.js",
"ft_awssdk_aws": "cd tests/functional/aws-node-sdk && AWS_ON_AIR=true mocha --reporter mocha-multi-reporters --reporter-options configFile=$INIT_CWD/tests/reporter-config.json test/",
"ft_awssdk_buckets": "cd tests/functional/aws-node-sdk && mocha --reporter mocha-multi-reporters --reporter-options configFile=$INIT_CWD/tests/reporter-config.json test/bucket",
"ft_awssdk_objects_misc": "cd tests/functional/aws-node-sdk && mocha --reporter mocha-multi-reporters --reporter-options configFile=$INIT_CWD/tests/reporter-config.json test/legacy test/object test/service test/support",

View File

@ -10,7 +10,7 @@ const { getMetadata, fakeMetadataRestore } = require('../utils/init');
const log = new DummyRequestLogger();
const bucketName = 'bucket1putversion33';
const bucketName = 'bucket1putversion34';
const objectName = 'object1putversion';
const mdListingParams = { listingType: 'DelimiterVersions', maxKeys: 1000 };
const archive = {
@ -208,7 +208,7 @@ describe('MPU with x-scal-s3-version-id header', () => {
});
});
it('should overwrite a version', done => {
it.only('should overwrite a version', done => {
const vParams = {
Bucket: bucketName,
VersioningConfiguration: {

View File

@ -13,8 +13,11 @@ const log = new DummyRequestLogger();
const bucketName = 'bucket1putversion32';
const objectName = 'object1putversion';
const mdListingParams = { listingType: 'DelimiterVersions', maxKeys: 1000 };
const hotLocation = 'us-east-2';
const archive = {
archiveInfo: {},
archiveInfo: {
hotLocation,
},
restoreRequestedAt: new Date(0).toString(),
restoreRequestedDays: 5,
};
@ -112,7 +115,7 @@ describe('PUT object with x-scal-s3-version-id header', () => {
assert.deepStrictEqual(versionsAfter, versionsBefore);
checkObjMdAndUpdate(objMDBefore, objMDAfter, ['location', 'content-length', 'content-md5',
'microVersionId', 'x-amz-restore', 'archive']);
'microVersionId', 'x-amz-restore', 'archive', 'dataStoreName']);
assert.deepStrictEqual(objMDAfter, objMDBefore);
return done();
});
@ -163,7 +166,7 @@ describe('PUT object with x-scal-s3-version-id header', () => {
assert.deepStrictEqual(versionsAfter, versionsBefore);
checkObjMdAndUpdate(objMDBefore, objMDAfter, ['location', 'content-length',
'content-md5', 'microVersionId', 'x-amz-restore', 'archive']);
'content-md5', 'microVersionId', 'x-amz-restore', 'archive', 'dataStoreName']);
assert.deepStrictEqual(objMDAfter, objMDBefore);
return done();
});
@ -214,7 +217,7 @@ describe('PUT object with x-scal-s3-version-id header', () => {
assert.deepStrictEqual(versionsAfter, versionsBefore);
checkObjMdAndUpdate(objMDBefore, objMDAfter, ['location', 'content-length',
'content-md5', 'microVersionId', 'x-amz-restore', 'archive']);
'content-md5', 'microVersionId', 'x-amz-restore', 'archive', 'dataStoreName']);
assert.deepStrictEqual(objMDAfter, objMDBefore);
return done();
});
@ -327,7 +330,7 @@ describe('PUT object with x-scal-s3-version-id header', () => {
assert.deepStrictEqual(versionsAfter, versionsBefore);
checkObjMdAndUpdate(objMDBefore, objMDAfter, ['location', 'content-length',
'content-md5', 'microVersionId', 'x-amz-restore', 'archive']);
'content-md5', 'microVersionId', 'x-amz-restore', 'archive', 'dataStoreName']);
assert.deepStrictEqual(objMDAfter, objMDBefore);
return done();
});
@ -379,7 +382,7 @@ describe('PUT object with x-scal-s3-version-id header', () => {
assert.deepStrictEqual(versionsAfter, versionsBefore);
checkObjMdAndUpdate(objMDBefore, objMDAfter, ['location', 'content-length',
'content-md5', 'microVersionId', 'x-amz-restore', 'archive']);
'content-md5', 'microVersionId', 'x-amz-restore', 'archive', 'dataStoreName']);
assert.deepStrictEqual(objMDAfter, objMDBefore);
return done();
});
@ -434,7 +437,7 @@ describe('PUT object with x-scal-s3-version-id header', () => {
assert.deepStrictEqual(versionsAfter, versionsBefore);
checkObjMdAndUpdate(objMDBefore, objMDAfter, ['location', 'content-length',
'content-md5', 'microVersionId', 'x-amz-restore', 'archive']);
'content-md5', 'microVersionId', 'x-amz-restore', 'archive', 'dataStoreName']);
assert.deepStrictEqual(objMDAfter, objMDBefore);
return done();
});
@ -487,7 +490,7 @@ describe('PUT object with x-scal-s3-version-id header', () => {
assert.deepStrictEqual(versionsAfter, versionsBefore);
checkObjMdAndUpdate(objMDBefore, objMDAfter, ['location', 'content-length',
'content-md5', 'microVersionId', 'x-amz-restore', 'archive']);
'content-md5', 'microVersionId', 'x-amz-restore', 'archive', 'dataStoreName']);
assert.deepStrictEqual(objMDAfter, objMDBefore);
return done();
});
@ -539,7 +542,7 @@ describe('PUT object with x-scal-s3-version-id header', () => {
assert.deepStrictEqual(versionsAfter, versionsBefore);
checkObjMdAndUpdate(objMDBefore, objMDAfter, ['location', 'content-length',
'content-md5', 'microVersionId', 'x-amz-restore', 'archive']);
'content-md5', 'microVersionId', 'x-amz-restore', 'archive', 'dataStoreName']);
assert.deepStrictEqual(objMDAfter, objMDBefore);
return done();
});
@ -598,7 +601,7 @@ describe('PUT object with x-scal-s3-version-id header', () => {
assert.deepStrictEqual(versionsAfter, versionsBefore);
checkObjMdAndUpdate(objMDBefore, objMDAfter, ['location', 'content-length',
'content-md5', 'microVersionId', 'x-amz-restore', 'archive']);
'content-md5', 'microVersionId', 'x-amz-restore', 'archive', 'dataStoreName']);
assert.deepStrictEqual(objMDAfter, objMDBefore);
return done();
});
@ -645,7 +648,7 @@ describe('PUT object with x-scal-s3-version-id header', () => {
assert.deepStrictEqual(versionsAfter, versionsBefore);
checkObjMdAndUpdate(objMDBefore, objMDAfter, ['location', 'content-length',
'content-md5', 'microVersionId', 'x-amz-restore', 'archive']);
'content-md5', 'microVersionId', 'x-amz-restore', 'archive', 'dataStoreName']);
assert.deepStrictEqual(objMDAfter, objMDBefore);
return done();
});
@ -742,6 +745,7 @@ describe('PUT object with x-scal-s3-version-id header', () => {
assert.deepStrictEqual(objMDAfter.archive.restoreRequestedAt, objMDBefore.archive.restoreRequestedAt);
assert.deepStrictEqual(objMDAfter.archive.restoreRequestedDays,
objMDBefore.archive.restoreRequestedDays);
assert.deepStrictEqual(objMDAfter.archive.archiveInfo.hotLocation, hotLocation);
assert.deepStrictEqual(objMDAfter['x-amz-restore']['ongoing-request'], false);
assert(objMDAfter.archive.restoreCompletedAt);

View File

@ -50,6 +50,7 @@ function fakeMetadataRestore(bucketName, objectName, versionId, archive, cb) {
}
// eslint-disable-next-line no-param-reassign
objMD.archive = archive;
objMD.dataStoreName = 'location-dmf-v1';
return metadata.putObjectMD(bucketName, objectName, objMD, { versionId: decodedVersionId },
log, err => cb(err));
});