Compare commits

...

19 Commits

Author SHA1 Message Date
bert-e c4681e9a9f Merge branch 'bugfix/CLDSRV-100/cherry-pick_CLDSRV-66_7.4.9' into q/7.4.9.5 2022-01-17 21:50:33 +00:00
Ronnie Smith c34e3951d2 bugfix: CLDSRV-56 Do not encode tokens again
(cherry picked from commit 166bddd6dc)
2022-01-17 10:57:54 -08:00
Taylor McKinnon 6e4b5209c0 use version config instead
(cherry picked from commit 1364dc895d)
2022-01-17 10:37:06 -08:00
Taylor McKinnon ca4ab4039b bf(CLDSRV-66): Don't pass `oldByteLength` for completeMPU if versioned bucket
(cherry picked from commit 190a068828)
2022-01-17 10:37:02 -08:00
Jonathan Gramain bf905a204d CLDSRV-60 CLDSRV-53 [hotfix 7.4.9.4] bump arsenal hash 2021-12-08 15:58:37 -08:00
Jonathan Gramain 88d890f0c9 CLDSRV-60 address review: improve test self-doc
In versioning tests, add a 'description' field in test cases instead
of comments to make the description part of the test name

(cherry picked from commit 6d741d1312)
2021-12-08 11:53:51 -08:00
Jonathan Gramain b1a28f9aeb improvement: CLDSRV-60 cleanup replay IDs of null versions
Replace the previous way of passing the replayId option to metadata
DELETE with the result of the preprocessingVersioningDelete function,
since it handles more cases, such as when a bucket with versioning
suspended has to delete the previous null version; in that case, the
null version's replay ID has to be cleaned up.

(cherry picked from commit 2d44334a1f)
2021-12-08 11:53:51 -08:00
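
For context, a minimal sketch (not part of the diff) of the delete path this commit changes; the field names and values come from the preprocessingVersioningDelete diff and its unit tests below:

// master version referencing a null version that was created by an MPU
const objectMD = {
    versionId: 'v1',
    nullVersionId: 'vnull',
    nullUploadId: 'nullFooUploadId', // only set if the null version was an MPU
};
// deleting version 'null' of this object now yields
// { deleteData: true, versionId: 'vnull', replayId: 'nullFooUploadId' },
// so metadata can clean up the null version's replay key as well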
Jonathan Gramain 7d3f619490 improvement: CLDSRV-60 process upload IDs in versioning helpers
In versioning helpers, make sure we pass on the nullUploadId/replayId
to the metadataStoreObject() or deleteObject() function, so that they
can update ObjectMD and pass the appropriate replay options to metadata

(cherry picked from commit a6e1c2ec83)
2021-12-08 11:53:51 -08:00
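
A minimal sketch (illustration only) of the put path this commit describes, with inputs and outputs taken from the unit tests below:

// master version is a null version created by an MPU
const objMD = { versionId: 'vnull', isNull: true, uploadId: 'fooUploadId' };
const { options } = processVersioningState(getMasterState(objMD), 'Enabled');
// options => { versioning: true, nullVersionId: 'vnull',
//              nullUploadId: 'fooUploadId' }
// createAndStoreObject() copies options.nullUploadId into
// metadataStoreParams, and services.metadataStoreObject() records it
// on the object metadata via md.setNullUploadId() (see diffs below)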
Jonathan Gramain ed1d018808 CLDSRV-61 add JSDoc
Add JSDoc for processVersioningState() and getMasterState() versioning
helper functions

(cherry picked from commit 8f913101a6)
2021-12-08 11:53:51 -08:00
Jonathan Gramain 873c2d8164 improvement: CLDSRV-61 test preprocessingVersioningDelete
Add unit tests for preprocessingVersioningDelete helper

(cherry picked from commit 1ccd36f9bf)
2021-12-08 11:53:51 -08:00
Jonathan Gramain f269cc3daa improvement: CLDSRV-61 refactor and test processVersioningState
- add unit tests for processVersioningState() helper

- remove the callback argument; instead, return the parameters as an
  object, which simplifies the code and makes it easier to test

(cherry picked from commit 22f7f253a1)
2021-12-08 11:53:51 -08:00
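
A minimal sketch of the calling-convention change; the new form is exactly how versioningPreprocessing() consumes the helper in the diff below:

// before: results delivered through a callback
// processVersioningState(mst, vstat,
//     (err, options, storeOptions, delOptions) => { ... });
// after: results returned synchronously as a single object
const { options, storeOptions, delOptions } =
    processVersioningState(mst, vCfg.Status);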
Jonathan Gramain 45eca56c85 CLDSRV-53 pass replayId when completing/deleting an MPU
Pass the 'replayId' option to metadata when:

- completing an MPU, to allow metadata to check or write a replay key

- deleting an MPU, to allow metadata to delete the replay key

(cherry picked from commit 5fc53b3b95)
2021-12-08 11:53:51 -08:00
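
The corresponding mapping, excerpted from the services.js diff below, where an MPU's uploadId is turned into a replay key hint for metadata:

if (uploadId) {
    md.setUploadId(uploadId);    // remember the MPU ID on the object MD
    options.replayId = uploadId; // metadata checks/writes the replay key
}
// on MPU deletion, the same uploadId is passed as options.replayId so
// metadata can delete the replay key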
Jonathan Gramain 00ea3e799c bugfix: CLDSRV-30 remove old data keys on CRR target version overwrite
Remove data keys belonging to the old object when overwriting a CRR
target object version with a new object, to avoid creating orphaned
data keys.

The code checks whether the data keys have actually changed before
removing the old ones, so that replays of the PUT metadata operation do
not remove live data keys. Similarly, metadata-only operations do not
trigger any data key removal.

(cherry picked from commit e7b7d28015)
2021-10-14 14:32:07 -07:00
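
A condensed sketch of the guard added in routeBackbeat's putMetadata() (the full version is in the diff below):

if (objMd &&
    headers['x-scal-replication-content'] !== 'METADATA' &&
    locationKeysHaveChanged(objMd.location, omVal.location)) {
    // old and new locations differ: delete each old data key
    // asynchronously, logging but not propagating individual errors
}
// a replayed PUT carries identical location keys, so the condition is
// false and no data is removed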
Jonathan Gramain 7fa6b8b30b bugfix: CLDSRV-30 failing test
Add a failing test to show that old data locations are not properly
cleaned up on backbeat PUT metadata overwriting an existing version.

(cherry picked from commit 07ae7e492a)
2021-10-14 14:32:07 -07:00
Jonathan Gramain b6f2233239 refactor: CLDSRV-30 rename locationKeysSanityCheck to locationKeysHaveChanged
The function name now reflects what it returns (a boolean), which is
clearer in the new context where it will be used. The JSDoc was also
improved and fixed.

(cherry picked from commit ba6765bea4)
2021-10-14 14:32:07 -07:00
Ronnie Smith 0260c33408 bugfix: CLDSRV-11 Revert checks and adjust tests
(cherry picked from commit cf742df7ae)
2021-08-19 16:02:31 -07:00
Ronnie Smith 74ddec79e3 bugfix: CLDSRV-11 Write ACL via predefined groups
(cherry picked from commit 330beed953)
2021-08-19 16:02:24 -07:00
naren-scality 95582aa579 bf CLDSRV-10 get acl with predefined groups
Refactor acl util

(cherry picked from commit 7d89a6f250)
2021-08-16 21:11:09 -07:00
naren-scality 54d29b61f0 bf CLDSRV-10 get acl with predefined groups
Corrected listing ACLs for an object or a bucket when predefined groups
are used.

(cherry picked from commit bca7a57fbf)
2021-08-16 21:11:00 -07:00
25 changed files with 1261 additions and 229 deletions

View File

@ -14,6 +14,7 @@ function isBucketAuthorized(bucket, requestType, canonicalID) {
// only bucket owner can modify or retrieve this property of a bucket
return false;
}
const bucketAcl = bucket.getAcl();
if (requestType === 'bucketGet' || requestType === 'bucketHead') {
if (bucketAcl.Canned === 'public-read'
@ -73,6 +74,13 @@ function isBucketAuthorized(bucket, requestType, canonicalID) {
|| bucketAcl.FULL_CONTROL.indexOf(canonicalID) > -1
|| bucketAcl.WRITE.indexOf(canonicalID) > -1) {
return true;
} else if (bucketAcl.WRITE.indexOf(publicId) > -1
|| (bucketAcl.WRITE.indexOf(allAuthedUsersId) > -1
&& canonicalID !== publicId)
|| (bucketAcl.FULL_CONTROL.indexOf(allAuthedUsersId) > -1
&& canonicalID !== publicId)
|| bucketAcl.FULL_CONTROL.indexOf(publicId) > -1) {
return true;
}
}
// Note that an account can have the ability to do objectPutACL,

View File

@ -245,6 +245,7 @@ function createAndStoreObject(bucketName, bucketMD, objectKey, objMD, authInfo,
metadataStoreParams.versioning = options.versioning;
metadataStoreParams.isNull = options.isNull;
metadataStoreParams.nullVersionId = options.nullVersionId;
metadataStoreParams.nullUploadId = options.nullUploadId;
return _storeInMDandDeleteData(bucketName, infoArr,
cipherBundle, metadataStoreParams,
options.dataToDelete, requestLogger, requestMethod, next);

View File

@ -1,15 +1,18 @@
/**
* Check keys that exist in the current list which will be used in composing
* object. This method checks against accidentally removing data keys due to
* instability from the metadata layer. The check returns true if there was no
* match and false if at least one key from the previous list exists in the
* current list
* Check if all keys that exist in the current list which will be used
* in composing object are not present in the old object's list.
*
* This method can be used to check against accidentally removing data
* keys due to instability from the metadata layer, or for replay
* detection in general.
*
* @param {array|string|null} prev - list of keys from the object being
* overwritten
* @param {array} curr - list of keys to be used in composing current object
* @returns {array} list of keys that can be deleted
* @returns {boolean} true if no key in `curr` is present in `prev`,
* false otherwise
*/
function locationKeysSanityCheck(prev, curr) {
function locationKeysHaveChanged(prev, curr) {
if (!prev || prev.length === 0) {
return true;
}
@ -22,4 +25,4 @@ function locationKeysSanityCheck(prev, curr) {
return curr.every(v => !keysMap[v.key]);
}
module.exports = locationKeysSanityCheck;
module.exports = locationKeysHaveChanged;

View File

@ -128,7 +128,23 @@ function _deleteNullVersionMD(bucketName, objKey, options, mst, log, cb) {
});
}
function processVersioningState(mst, vstat, cb) {
/**
* Process state from the master version of an object and the bucket
* versioning configuration, return a set of options objects
*
* @param {object} mst - state of master version, as returned by
* getMasterState()
* @param {string} vstat - bucket versioning status: 'Enabled' or 'Suspended'
*
* @return {object} result object with the following attributes:
* - {object} options: versioning-related options to pass to the
services.metadataStoreObject() call
* - {object} [storeOptions]: options for metadata to create a new
null version key, if needed
* - {object} [delOptions]: options for metadata to delete the null
version key, if needed
*/
function processVersioningState(mst, vstat) {
const options = {};
const storeOptions = {};
const delOptions = {};
@ -142,9 +158,12 @@ function processVersioningState(mst, vstat, cb) {
// if null version exists, clean it up prior to put
if (mst.isNull) {
delOptions.versionId = mst.versionId;
return cb(null, options, null, delOptions);
if (mst.uploadId) {
delOptions.replayId = mst.uploadId;
}
return cb(null, options);
return { options, delOptions };
}
return { options };
}
// versioning is enabled, create a new version
options.versioning = true;
@ -154,9 +173,14 @@ function processVersioningState(mst, vstat, cb) {
storeOptions.versionId = versionId;
storeOptions.isNull = true;
options.nullVersionId = versionId;
return cb(null, options, storeOptions);
// non-versioned (non-null) MPU objects don't have a
// replay ID, so don't reference their uploadId
if (mst.isNull && mst.uploadId) {
options.nullUploadId = mst.uploadId;
}
return cb(null, options);
return { options, storeOptions };
}
return { options };
}
// master is versioned and is not a null version
const nullVersionId = mst.nullVersionId;
@ -165,17 +189,36 @@ function processVersioningState(mst, vstat, cb) {
options.versionId = '';
options.isNull = true;
if (nullVersionId === undefined) {
return cb(null, options);
return { options };
}
delOptions.versionId = nullVersionId;
return cb(null, options, null, delOptions);
if (mst.nullUploadId) {
delOptions.replayId = mst.nullUploadId;
}
return { options, delOptions };
}
// versioning is enabled, put the new version
options.versioning = true;
options.nullVersionId = nullVersionId;
return cb(null, options);
if (mst.nullUploadId) {
options.nullUploadId = mst.nullUploadId;
}
return { options };
}
/**
* Build the state of the master version from its object metadata
*
* @param {object} objMD - object metadata parsed from JSON
*
* @return {object} state of master version, with the following attributes:
* - {boolean} exists - true if the object exists (i.e. if `objMD` is truish)
* - {string} versionId - version ID of the master key
* - {boolean} isNull - whether the master version is a null version
* - {string} nullVersionId - if not a null version, reference to the
* null version ID
* - {array} objLocation - array of data locations
*/
function getMasterState(objMD) {
if (!objMD) {
return {};
@ -183,8 +226,10 @@ function getMasterState(objMD) {
const mst = {
exists: true,
versionId: objMD.versionId,
uploadId: objMD.uploadId,
isNull: objMD.isNull,
nullVersionId: objMD.nullVersionId,
nullUploadId: objMD.nullUploadId,
};
if (objMD.location) {
mst.objLocation = Array.isArray(objMD.location) ?
@ -212,35 +257,29 @@ function getMasterState(objMD) {
*/
function versioningPreprocessing(bucketName, bucketMD, objectKey, objMD,
log, callback) {
const options = {};
const mst = getMasterState(objMD);
const vCfg = bucketMD.getVersioningConfiguration();
// bucket is not versioning configured
if (!vCfg) {
options.dataToDelete = mst.objLocation;
const options = { dataToDelete: mst.objLocation };
return process.nextTick(callback, null, options);
}
// bucket is versioning configured
return async.waterfall([
function processState(next) {
processVersioningState(mst, vCfg.Status,
(err, options, storeOptions, delOptions) => {
process.nextTick(next, err, options, storeOptions,
delOptions);
});
},
function storeVersion(options, storeOptions, delOptions, next) {
const { options, storeOptions, delOptions } =
processVersioningState(mst, vCfg.Status);
return async.series([
function storeVersion(next) {
if (!storeOptions) {
return process.nextTick(next, null, options, delOptions);
return process.nextTick(next);
}
const versionMD = Object.assign({}, objMD, storeOptions);
const params = { versionId: storeOptions.versionId };
return _storeNullVersionMD(bucketName, objectKey, versionMD,
params, log, err => next(err, options, delOptions));
params, log, next);
},
function deleteNullVersion(options, delOptions, next) {
function deleteNullVersion(next) {
if (!delOptions) {
return process.nextTick(next, null, options);
return process.nextTick(next);
}
return _deleteNullVersionMD(bucketName, objectKey, delOptions, mst,
log, (err, nullDataToDelete) => {
@ -258,10 +297,10 @@ function versioningPreprocessing(bucketName, bucketMD, objectKey, objMD,
return next(errors.InternalError);
}
Object.assign(options, { dataToDelete: nullDataToDelete });
return next(null, options);
return next();
});
},
], (err, options) => callback(err, options));
], err => callback(err, options));
}
/** preprocessingVersioningDelete - return versioning information for S3 to
@ -290,6 +329,9 @@ function preprocessingVersioningDelete(bucketName, bucketMD, objectMD,
// deleting a specific version
options.deleteData = true;
options.versionId = reqVersionId;
if (objectMD.uploadId) {
options.replayId = objectMD.uploadId;
}
return callback(null, options);
}
if (reqVersionId) {
@ -297,18 +339,26 @@ function preprocessingVersioningDelete(bucketName, bucketMD, objectMD,
if (objectMD.versionId === undefined) {
// object is not versioned, deleting it
options.deleteData = true;
// non-versioned (non-null) MPU objects don't have a
// replay ID, so don't reference their uploadId
return callback(null, options);
}
if (objectMD.isNull) {
// master is the null version
options.deleteData = true;
options.versionId = objectMD.versionId;
if (objectMD.uploadId) {
options.replayId = objectMD.uploadId;
}
return callback(null, options);
}
if (objectMD.nullVersionId) {
// null version exists, deleting it
options.deleteData = true;
options.versionId = objectMD.nullVersionId;
if (objectMD.nullUploadId) {
options.replayId = objectMD.nullUploadId;
}
return callback(null, options);
}
// null version does not exist, no deletion
@ -323,6 +373,8 @@ module.exports = {
decodeVersionId,
getVersionIdResHeader,
checkQueryVersionId,
processVersioningState,
getMasterState,
versioningPreprocessing,
preprocessingVersioningDelete,
};

View File

@ -12,6 +12,12 @@ const versionIdUtils = versioning.VersionID;
const { generateToken, decryptToken }
= require('../api/apiUtils/object/continueToken');
// do not url encode the continuation tokens
const skipUrlEncoding = new Set([
'ContinuationToken',
'NextContinuationToken',
]);
/* Sample XML response for GET bucket objects V2:
<ListBucketResult xmlns="http://s3.amazonaws.com/doc/2006-03-01/">
<Name>example-bucket</Name>
@ -204,14 +210,14 @@ function processMasterVersions(bucketName, listParams, list) {
const escapeXmlFn = listParams.encoding === 'url' ?
querystring.escape : escapeForXml;
xmlParams.forEach(p => {
if (p.value || p.tag === 'KeyCount') {
if (p.value && skipUrlEncoding.has(p.tag)) {
xml.push(`<${p.tag}>${p.value}</${p.tag}>`);
} else if (p.value || p.tag === 'KeyCount') {
xml.push(`<${p.tag}>${escapeXmlFn(p.value)}</${p.tag}>`);
} else if (p.tag !== 'NextMarker' &&
p.tag !== 'EncodingType' &&
p.tag !== 'Delimiter' &&
p.tag !== 'StartAfter' &&
p.tag !== 'ContinuationToken' &&
p.tag !== 'NextContinuationToken') {
p.tag !== 'StartAfter') {
xml.push(`<${p.tag}/>`);
}
});
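
Why the two tokens are exempt from encoding: continuation tokens are already base64, and URL-escaping them corrupts the value. A runnable illustration, using the token value from the unit tests below:

const querystring = require('querystring');
const token = 'aW52YWxpZFVSSX5+fmI='; // NextContinuationToken from the tests
querystring.escape(token);
// => 'aW52YWxpZFVSSX5%2BfmI%3D' -- '+' and '=' get escaped, so a client
// echoing this back would hand the server a token it cannot decrypt
// (CLDSRV-56: 'Do not encode tokens again')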

View File

@ -1,5 +1,4 @@
const aclUtils = require('../utilities/aclUtils');
const constants = require('../../constants');
const { metadataValidateBucket } = require('../metadata/metadataUtils');
const vault = require('../auth/vault');
const collectCorsHeaders = require('../utilities/collectCorsHeaders');
@ -53,11 +52,6 @@ function bucketGetACL(authInfo, request, log, callback) {
displayName: undefined,
},
};
const grantsByURI = [
constants.publicId,
constants.allAuthedUsersId,
constants.logId,
];
metadataValidateBucket(metadataValParams, log, (err, bucket) => {
const corsHeaders = collectCorsHeaders(request.headers.origin,
@ -68,13 +62,6 @@ function bucketGetACL(authInfo, request, log, callback) {
return callback(err, null, corsHeaders);
}
const bucketACL = bucket.getAcl();
const allSpecificGrants = [].concat(
bucketACL.FULL_CONTROL,
bucketACL.WRITE,
bucketACL.WRITE_ACP,
bucketACL.READ,
bucketACL.READ_ACP
);
grantInfo.ownerInfo.ID = bucket.getOwner();
grantInfo.ownerInfo.displayName = bucket.getOwnerDisplayName();
const ownerGrant = {
@ -100,21 +87,9 @@ function bucketGetACL(authInfo, request, log, callback) {
* privileges, want to display both and not lose the duplicate
* when receive one dictionary entry back from Vault)
*/
const canonicalIDs = allSpecificGrants
.filter(item => grantsByURI.indexOf(item) < 0);
const canonicalIDs = aclUtils.getCanonicalIDs(bucketACL);
// Build array with grants by URI
const uriGrantInfo = grantsByURI.map(uri => {
const permission = aclUtils.getPermissionType(uri, bucketACL,
'bucket');
if (permission) {
return {
URI: uri,
permission,
};
}
return undefined;
}).filter(item => item !== undefined);
const uriGrantInfo = aclUtils.getUriGrantInfo(bucketACL);
if (canonicalIDs.length === 0) {
/**
* If no acl's set by account canonicalID, just add URI
@ -138,25 +113,8 @@ function bucketGetACL(authInfo, request, log, callback) {
{ method: 'vault.getEmailAddresses', error: err });
return callback(err, null, corsHeaders);
}
const individualGrants = canonicalIDs.map(canonicalID => {
/**
* Emails dict only contains entries that were found
* in Vault
*/
if (emails[canonicalID]) {
const permission = aclUtils.getPermissionType(
canonicalID, bucketACL, 'bucket');
if (permission) {
const displayName = emails[canonicalID];
return {
ID: canonicalID,
displayName,
permission,
};
}
}
return undefined;
}).filter(item => item !== undefined);
const individualGrants =
aclUtils.getIndividualGrants(bucketACL, canonicalIDs, emails);
// Add to grantInfo any individual grants and grants by uri
grantInfo.grants = grantInfo.grants
.concat(individualGrants).concat(uriGrantInfo);

View File

@ -20,8 +20,8 @@ const { metadataValidateBucketAndObj } = require('../metadata/metadataUtils');
const { skipMpuPartProcessing } = require('../data/external/utils');
const locationConstraintCheck
= require('./apiUtils/object/locationConstraintCheck');
const locationKeysSanityCheck
= require('./apiUtils/object/locationKeysSanityCheck');
const locationKeysHaveChanged
= require('./apiUtils/object/locationKeysHaveChanged');
const logger = require('../utilities/logger');
@ -327,6 +327,7 @@ function completeMultipartUpload(authInfo, request, log, callback) {
metaStoreParams.versioning = options.versioning;
metaStoreParams.isNull = options.isNull;
metaStoreParams.nullVersionId = options.nullVersionId;
metaStoreParams.nullUploadId = options.nullUploadId;
return next(null, destBucket, dataLocations,
metaStoreParams, mpuBucket, keysToDelete, aggregateETag,
objMD, extraPartLocations, pseudoCipherBundle,
@ -362,9 +363,7 @@ function completeMultipartUpload(authInfo, request, log, callback) {
// metadata keys, which are likely to have failed in
// the previous MPU completion attempt
//
const onlyDifferentLocationKeys = locationKeysSanityCheck(
objMD.location, dataLocations);
if (!onlyDifferentLocationKeys) {
if (!locationKeysHaveChanged(objMD.location, dataLocations)) {
log.info('MPU complete request replay detected', {
method: 'completeMultipartUpload.storeAsNewObj',
bucketName: destinationBucket.getName(),
@ -446,10 +445,14 @@ function completeMultipartUpload(authInfo, request, log, callback) {
resHeaders['x-amz-version-id'] =
versionIdUtils.encode(generatedVersionId);
}
const vcfg = destinationBucket.getVersioningConfiguration();
const isVersionedObj = vcfg && vcfg.Status === 'Enabled';
xmlParams.eTag = `"${aggregateETag}"`;
const xml = convertToXml('completeMultipartUpload', xmlParams);
pushMetric('completeMultipartUpload', log, {
oldByteLength,
oldByteLength: isVersionedObj ? null : oldByteLength,
authInfo,
canonicalID: destinationBucket.getOwner(),
bucket: bucketName,

View File

@ -408,6 +408,8 @@ function objectCopy(authInfo, request, sourceBucket,
storeMetadataParams.isNull = options.isNull;
// eslint-disable-next-line
storeMetadataParams.nullVersionId = options.nullVersionId;
// eslint-disable-next-line
storeMetadataParams.nullUploadId = options.nullUploadId;
const dataToDelete = options.dataToDelete;
return next(null, storeMetadataParams, destDataGetInfoArr,
destObjMD, serverSideEncryption, destBucketMD,

View File

@ -55,6 +55,7 @@ function objectDelete(authInfo, request, log, cb) {
if (err) {
return next(err, bucketMD);
}
const versioningCfg = bucketMD.getVersioningConfiguration();
if (!objMD) {
if (!versioningCfg) {

View File

@ -3,7 +3,6 @@ const { errors } = require('arsenal');
const aclUtils = require('../utilities/aclUtils');
const collectCorsHeaders = require('../utilities/collectCorsHeaders');
const constants = require('../../constants');
const { pushMetric } = require('../utapi/utilities');
const { decodeVersionId, getVersionIdResHeader }
= require('./apiUtils/object/versioning');
@ -68,10 +67,6 @@ function objectGetACL(authInfo, request, log, callback) {
displayName: undefined,
},
};
const grantsByURI = [constants.publicId,
constants.allAuthedUsersId,
constants.logId,
];
return async.waterfall([
function validateBucketAndObj(next) {
@ -106,12 +101,6 @@ function objectGetACL(authInfo, request, log, callback) {
const verCfg = bucket.getVersioningConfiguration();
const resVersionId = getVersionIdResHeader(verCfg, objectMD);
const objectACL = objectMD.acl;
const allSpecificGrants = [].concat(
objectACL.FULL_CONTROL,
objectACL.WRITE_ACP,
objectACL.READ,
objectACL.READ_ACP
).filter(item => item !== undefined);
grantInfo.ownerInfo.ID = objectMD['owner-id'];
grantInfo.ownerInfo.displayName = objectMD['owner-display-name'];
// Object owner always has full control
@ -144,20 +133,9 @@ function objectGetACL(authInfo, request, log, callback) {
* privileges, want to display both and not lose the duplicate
* when receive one dictionary entry back from Vault)
*/
const canonicalIDs = allSpecificGrants.filter(item =>
grantsByURI.indexOf(item) < 0);
const canonicalIDs = aclUtils.getCanonicalIDs(objectACL);
// Build array with grants by URI
const uriGrantInfo = grantsByURI.map(uri => {
const permission = aclUtils.getPermissionType(uri, objectACL,
'object');
if (permission) {
return {
URI: uri,
permission,
};
}
return undefined;
}).filter(item => item !== undefined);
const uriGrantInfo = aclUtils.getUriGrantInfo(objectACL);
if (canonicalIDs.length === 0) {
/**
@ -179,25 +157,8 @@ function objectGetACL(authInfo, request, log, callback) {
{ method: 'objectGetACL', error: err });
return next(err, bucket);
}
const individualGrants = canonicalIDs.map(canonicalID => {
/**
* Emails dict only contains entries that were found
* in Vault
*/
if (emails[canonicalID]) {
const permission = aclUtils.getPermissionType(
canonicalID, objectACL, 'object');
if (permission) {
const displayName = emails[canonicalID];
return {
ID: canonicalID,
displayName,
permission,
};
}
}
return undefined;
}).filter(item => item !== undefined);
const individualGrants = aclUtils.getIndividualGrants(objectACL,
canonicalIDs, emails);
// Add to grantInfo any individual grants and grants by uri
grantInfo.grants = grantInfo.grants
.concat(individualGrants).concat(uriGrantInfo);

View File

@ -65,6 +65,7 @@ function objectPut(authInfo, request, streamingV4Params, log, callback) {
'from non-owner account');
return callback(errors.NoSuchBucket);
}
return async.waterfall([
function handleTransientOrDeleteBuckets(next) {
if (bucket.hasTransientFlag() || bucket.hasDeletedFlag()) {

View File

@ -13,6 +13,8 @@ const { dataStore } = require('../api/apiUtils/object/storeObject');
const prepareRequestContexts = require(
'../api/apiUtils/authorization/prepareRequestContexts');
const { decodeVersionId } = require('../api/apiUtils/object/versioning');
const locationKeysHaveChanged
= require('../api/apiUtils/object/locationKeysHaveChanged');
const { metadataValidateBucketAndObj,
metadataGetObject } = require('../metadata/metadataUtils');
const { BackendInfo } = require('../api/apiUtils/object/BackendInfo');
@ -335,6 +337,36 @@ function putMetadata(request, response, bucketInfo, objMd, log, callback) {
return callback(err);
}
pushReplicationMetric(objMd, omVal, bucketName, objectKey, log);
if (objMd &&
headers['x-scal-replication-content'] !== 'METADATA' &&
locationKeysHaveChanged(objMd.location, omVal.location)) {
log.info('removing old data locations', {
method: 'putMetadata',
bucketName,
objectKey,
});
async.eachLimit(objMd.location, 5,
(loc, next) => data.delete(loc, log, err => {
if (err) {
log.warn('error removing old data location key', {
bucketName,
objectKey,
locationKey: loc,
error: err.message,
});
}
// do not forward the error to let other
// locations be deleted
next();
}),
() => {
log.debug('done removing old data locations', {
method: 'putMetadata',
bucketName,
objectKey,
});
});
}
return _respond(response, md, log, callback);
});
});

View File

@ -94,8 +94,8 @@ const services = {
const { objectKey, authInfo, size, contentMD5, metaHeaders,
contentType, cacheControl, contentDisposition, contentEncoding,
expires, multipart, headers, overrideMetadata, log,
lastModifiedDate, versioning, versionId, tagging, taggingCopy,
replicationInfo, dataStoreName } = params;
lastModifiedDate, versioning, versionId, uploadId,
tagging, taggingCopy, replicationInfo, dataStoreName } = params;
log.trace('storing object in metadata');
assert.strictEqual(typeof bucketName, 'string');
const md = new ObjectMD();
@ -140,11 +140,16 @@ const services = {
if (versionId || versionId === '') {
options.versionId = versionId;
}
if (uploadId) {
md.setUploadId(uploadId);
options.replayId = uploadId;
}
// information to store about the version and the null version id
// in the object metadata
const { isNull, nullVersionId, isDeleteMarker } = params;
const { isNull, nullVersionId, nullUploadId, isDeleteMarker } = params;
md.setIsNull(isNull)
.setNullVersionId(nullVersionId)
.setNullUploadId(nullUploadId)
.setIsDeleteMarker(isDeleteMarker);
if (versionId && versionId !== 'null') {
md.setVersionId(versionId);

View File

@ -11,6 +11,11 @@ const possibleGrantHeaders = ['x-amz-grant-read', 'x-amz-grant-write',
const regexpEmailAddress = /^\S+@\S+.\S+$/;
const aclUtils = {};
const grantsByURI = [
constants.publicId,
constants.allAuthedUsersId,
constants.logId,
];
/**
* handleCannedGrant - Populate grantInfo for a bucketGetACL or objectGetACL
@ -320,4 +325,91 @@ aclUtils.checkGrantHeaderValidity = function checkGrantHeaderValidity(headers) {
return true;
};
/**
* getGrants - Get all acl grants as an object
* @param {object} acl - acl from metadata
* @returns {object} grants
*/
function getGrants(acl) {
return {
FULL_CONTROL: acl.FULL_CONTROL,
WRITE: acl.WRITE,
WRITE_ACP: acl.WRITE_ACP,
READ: acl.READ,
READ_ACP: acl.READ_ACP,
};
}
/**
* getCanonicalIDs - Get all the unique canonical IDs from object or bucket acl
* @param {object} acl - acl from metadata
* @returns {array} canonicalIDs - array of unique canonicalIDs from acl
*/
aclUtils.getCanonicalIDs = function getCanonicalIDs(acl) {
const aclGrantees = [].concat(
acl.FULL_CONTROL,
acl.WRITE,
acl.WRITE_ACP,
acl.READ,
acl.READ_ACP
);
const uniqueGrantees = Array.from(new Set(aclGrantees));
// grantees can be a mix of canonicalIDs and predefined groups in the form
// of uri, so filter out only canonicalIDs
return uniqueGrantees.filter(item => item && !grantsByURI.includes(item));
};
/**
* getUriGrantInfo - Get all the grants from object or bucket acl by uri
* @param {object} acl - acl from metadata
* @returns {array} uriGrantInfo - array of grants by uri
*/
aclUtils.getUriGrantInfo = function getUriGrantInfo(acl) {
const grants = getGrants(acl);
const uriGrantInfo = [];
const validGrants = Object.entries(grants)
.filter(([permission, grantees]) => permission
&& Array.isArray(grantees));
validGrants.forEach(([permission, grantees]) => {
grantees.filter(grantee => grantsByURI.includes(grantee))
.forEach(grantee => {
uriGrantInfo.push({
URI: grantee,
permission,
});
});
});
return uriGrantInfo;
};
/**
* getIndividualGrants - Get all the grants from object or bucket acl mapped to
* canonicalID/email
* @param {object} acl - acl from metadata
* @param {array} canonicalIDs - list of canonicalIDs from acl
* @param {array} emails - canonicalID/email dictionary
* @returns {array} individualGrantInfo - array of grants mapped to
* canonicalID/email
*/
aclUtils.getIndividualGrants = function getIndividualGrants(acl, canonicalIDs,
emails) {
const grants = getGrants(acl);
const individualGrantInfo = [];
const validGrants = Object.entries(grants)
.filter(([permission, grantees]) => permission
&& Array.isArray(grantees));
validGrants.forEach(([permission, grantees]) => {
grantees.filter(grantee => canonicalIDs.includes(grantee)
&& emails[grantee])
.forEach(grantee => {
individualGrantInfo.push({
ID: grantee,
displayName: emails[grantee],
permission,
});
});
});
return individualGrantInfo;
};
module.exports = aclUtils;
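
A small usage sketch for the new helpers (illustration only; 'canonicalId1' is a hypothetical canonical ID, and the require paths match the ones used in bucketGetACL.js above):

const aclUtils = require('../utilities/aclUtils');
const constants = require('../../constants');
const acl = {
    FULL_CONTROL: ['canonicalId1'],
    WRITE: [constants.publicId],
    WRITE_ACP: [],
    READ: [constants.publicId],
    READ_ACP: [],
};
aclUtils.getCanonicalIDs(acl);
// => ['canonicalId1'] (group URIs filtered out, duplicates removed)
aclUtils.getUriGrantInfo(acl);
// => [{ URI: constants.publicId, permission: 'WRITE' },
//     { URI: constants.publicId, permission: 'READ' }]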

View File

@ -20,7 +20,7 @@
"homepage": "https://github.com/scality/S3#readme",
"dependencies": {
"@hapi/joi": "^17.1.0",
"arsenal": "github:scality/Arsenal#f17006b",
"arsenal": "github:scality/Arsenal#f4f867d",
"async": "~2.5.0",
"aws-sdk": "2.905.0",
"azure-storage": "^2.1.0",

View File

@ -38,6 +38,7 @@ withV4(sigCfg => {
function cbWithError(done) {
return err => {
assert.notStrictEqual(err, null);
assert.strictEqual(err.statusCode, errors.AccessDenied.code);
done();
};
@ -99,7 +100,7 @@ withV4(sigCfg => {
});
});
it.skip('should grant write access', done => {
it('should grant write access', done => {
s3.putBucketAcl({
Bucket: testBucket,
GrantWrite: grantUri,
@ -114,7 +115,7 @@ withV4(sigCfg => {
});
});
it.skip('should grant write access with ' +
it('should grant write access with ' +
'grant-full-control', done => {
s3.putBucketAcl({
Bucket: testBucket,
@ -130,8 +131,7 @@ withV4(sigCfg => {
});
});
// TODO: handle write acl in S3C-4401
it.skip('should not grant write access', done => {
it('should not grant write access', done => {
s3.putBucketAcl(aclParam, err => {
assert.ifError(err);
const param = {
@ -143,7 +143,7 @@ withV4(sigCfg => {
});
});
it.skip('should not grant write access on an object not owned ' +
it('should grant write access on an object not owned ' +
'by the grantee', done => {
s3.putBucketAcl({
Bucket: testBucket,
@ -155,11 +155,11 @@ withV4(sigCfg => {
Body: testBody,
Key: ownerObjKey,
};
awsRequest(auth, 'putObject', param, cbWithError(done));
awsRequest(auth, 'putObject', param, cbNoError(done));
});
});
it.skip('should not delete object not owned by the ' +
it(`should ${auth ? '' : 'not '}delete object not owned by the` +
'grantee', done => {
s3.putBucketAcl({
Bucket: testBucket,
@ -170,7 +170,18 @@ withV4(sigCfg => {
Bucket: testBucket,
Key: ownerObjKey,
};
awsRequest(auth, 'deleteObject', param, cbWithError(done));
awsRequest(auth, 'deleteObject', param, err => {
if (auth) {
assert.ifError(err);
} else {
assert.notStrictEqual(err, null);
assert.strictEqual(
err.statusCode,
errors.AccessDenied.code
);
}
done();
});
});
});

View File

@ -324,6 +324,141 @@ describeSkipIfAWS('backbeat routes', () => {
done();
});
});
it('should remove old object data locations if version is overwritten',
done => {
let oldLocation;
const testKeyOldData = `${testKey}-old-data`;
async.waterfall([next => {
// put object's data locations
makeBackbeatRequest({
method: 'PUT', bucket: TEST_BUCKET,
objectKey: testKey,
resourceType: 'data',
headers: {
'content-length': testData.length,
'content-md5': testDataMd5,
'x-scal-canonical-id': testArn,
},
authCredentials: backbeatAuthCredentials,
requestBody: testData }, next);
}, (response, next) => {
assert.strictEqual(response.statusCode, 200);
// put object metadata
const newMd = Object.assign({}, testMd);
newMd.location = JSON.parse(response.body);
oldLocation = newMd.location;
makeBackbeatRequest({
method: 'PUT', bucket: TEST_BUCKET,
objectKey: testKey,
resourceType: 'metadata',
authCredentials: backbeatAuthCredentials,
requestBody: JSON.stringify(newMd),
}, next);
}, (response, next) => {
assert.strictEqual(response.statusCode, 200);
// put another object which metadata reference the
// same data locations, we will attempt to retrieve
// this object at the end of the test to confirm that
// its locations have been deleted
const oldDataMd = Object.assign({}, testMd);
oldDataMd.location = oldLocation;
makeBackbeatRequest({
method: 'PUT', bucket: TEST_BUCKET,
objectKey: testKeyOldData,
resourceType: 'metadata',
authCredentials: backbeatAuthCredentials,
requestBody: JSON.stringify(oldDataMd),
}, next);
}, (response, next) => {
assert.strictEqual(response.statusCode, 200);
// create new data locations
makeBackbeatRequest({
method: 'PUT', bucket: TEST_BUCKET,
objectKey: testKey,
resourceType: 'data',
headers: {
'content-length': testData.length,
'content-md5': testDataMd5,
'x-scal-canonical-id': testArn,
},
authCredentials: backbeatAuthCredentials,
requestBody: testData }, next);
}, (response, next) => {
assert.strictEqual(response.statusCode, 200);
// overwrite the original object version, now
// with references to the new data locations
const newMd = Object.assign({}, testMd);
newMd.location = JSON.parse(response.body);
makeBackbeatRequest({
method: 'PUT', bucket: TEST_BUCKET,
objectKey: testKey,
resourceType: 'metadata',
authCredentials: backbeatAuthCredentials,
requestBody: JSON.stringify(newMd),
}, next);
}, (response, next) => {
assert.strictEqual(response.statusCode, 200);
// give some time for the async deletes to complete
setTimeout(() => checkObjectData(s3, testKey, testData, next),
1000);
}, next => {
// check that the object copy referencing the old data
// locations is unreadable, confirming that the old
// data locations have been deleted
s3.getObject({
Bucket: TEST_BUCKET,
Key: testKeyOldData,
}, err => {
assert(err, 'expected error to get object with old data ' +
'locations, got success');
next();
});
}], err => {
assert.ifError(err);
done();
});
});
it('should not remove data locations on replayed metadata PUT',
done => {
let serializedNewMd;
async.waterfall([next => {
makeBackbeatRequest({
method: 'PUT', bucket: TEST_BUCKET,
objectKey: testKey,
resourceType: 'data',
headers: {
'content-length': testData.length,
'content-md5': testDataMd5,
'x-scal-canonical-id': testArn,
},
authCredentials: backbeatAuthCredentials,
requestBody: testData }, next);
}, (response, next) => {
assert.strictEqual(response.statusCode, 200);
const newMd = Object.assign({}, testMd);
newMd.location = JSON.parse(response.body);
serializedNewMd = JSON.stringify(newMd);
async.timesSeries(2, (i, putDone) => makeBackbeatRequest({
method: 'PUT', bucket: TEST_BUCKET,
objectKey: testKey,
resourceType: 'metadata',
authCredentials: backbeatAuthCredentials,
requestBody: serializedNewMd,
}, (err, response) => {
assert.ifError(err);
assert.strictEqual(response.statusCode, 200);
putDone(err);
}), () => next());
}, next => {
// check that the object is still readable to make
// sure we did not remove the data keys
checkObjectData(s3, testKey, testData, next);
}], err => {
assert.ifError(err);
done();
});
});
});
describe('backbeat authorization checks', () => {
[{ method: 'PUT', resourceType: 'metadata' },

View File

@ -1,41 +1,41 @@
const assert = require('assert');
const locationKeysSanityCheck =
require('../../../../lib/api/apiUtils/object/locationKeysSanityCheck');
const locationKeysHaveChanged =
require('../../../../lib/api/apiUtils/object/locationKeysHaveChanged');
describe('Sanity check for location keys', () => {
describe('Check if location keys have changed between object locations', () => {
it('should return true for no match ', () => {
const prev = [{ key: 'aaa' }, { key: 'bbb' }, { key: 'ccc' }];
const curr = [{ key: 'ddd' }, { key: 'eee' }, { key: 'fff' }];
assert.strictEqual(locationKeysSanityCheck(prev, curr), true);
assert.strictEqual(locationKeysHaveChanged(prev, curr), true);
});
it('should return false if there is a match of 1 key', () => {
const prev = [{ key: 'aaa' }, { key: 'bbb' }, { key: 'ccc' }];
const curr = [{ key: 'ddd' }, { key: 'aaa' }, { key: 'fff' }];
assert.strictEqual(locationKeysSanityCheck(prev, curr), false);
assert.strictEqual(locationKeysHaveChanged(prev, curr), false);
});
it('should return false if all keys match', () => {
const prev = [{ key: 'aaa' }, { key: 'bbb' }, { key: 'ccc' }];
const curr = [{ key: 'aaa' }, { key: 'bbb' }, { key: 'ccc' }];
assert.strictEqual(locationKeysSanityCheck(prev, curr), false);
assert.strictEqual(locationKeysHaveChanged(prev, curr), false);
});
it('should return false if there is match (model version 2)', () => {
const prev = 'ccc';
const curr = [{ key: 'aaa' }, { key: 'bbb' }, { key: 'ccc' }];
assert.strictEqual(locationKeysSanityCheck(prev, curr), false);
assert.strictEqual(locationKeysHaveChanged(prev, curr), false);
});
it('should return true if there is no match(model version 2)', () => {
const prev = 'aaa';
const curr = [{ key: 'ddd' }, { key: 'eee' }, { key: 'fff' }];
assert.strictEqual(locationKeysSanityCheck(prev, curr), true);
assert.strictEqual(locationKeysHaveChanged(prev, curr), true);
});
it('should return true if prev location is null', () => {
const prev = null;
const curr = [{ key: 'ddd' }, { key: 'eee' }, { key: 'fff' }];
assert.strictEqual(locationKeysSanityCheck(prev, curr), true);
assert.strictEqual(locationKeysHaveChanged(prev, curr), true);
});
});

View File

@ -0,0 +1,437 @@
const assert = require('assert');
const { errors, versioning } = require('arsenal');
const { config } = require('../../../../lib/Config');
const INF_VID = versioning.VersionID.getInfVid(config.replicationGroupId);
const { processVersioningState, getMasterState,
preprocessingVersioningDelete } =
require('../../../../lib/api/apiUtils/object/versioning');
describe('versioning helpers', () => {
describe('getMasterState+processVersioningState', () => {
[
{
description: 'no prior version exists',
objMD: null,
versioningEnabledExpectedRes: {
options: {
versioning: true,
},
},
versioningSuspendedExpectedRes: {
options: {
isNull: true,
versionId: '',
},
},
},
{
description: 'prior non-null object version exists',
objMD: {
versionId: 'v1',
},
versioningEnabledExpectedRes: {
options: {
versioning: true,
},
},
versioningSuspendedExpectedRes: {
options: {
isNull: true,
versionId: '',
},
},
},
{
description: 'prior MPU object non-null version exists',
objMD: {
versionId: 'v1',
uploadId: 'fooUploadId',
},
versioningEnabledExpectedRes: {
options: {
versioning: true,
},
},
versioningSuspendedExpectedRes: {
options: {
isNull: true,
versionId: '',
},
},
},
{
description: 'prior null object version exists',
objMD: {
versionId: 'vnull',
isNull: true,
},
versioningEnabledExpectedRes: {
options: {
versioning: true,
nullVersionId: 'vnull',
},
// instruct to first copy the null version onto a
// newly created version key preserving the version ID
storeOptions: {
isNull: true,
versionId: 'vnull',
},
},
versioningSuspendedExpectedRes: {
options: {
isNull: true,
versionId: '',
},
delOptions: {
versionId: 'vnull',
},
},
},
{
description: 'prior MPU object null version exists',
objMD: {
versionId: 'vnull',
isNull: true,
uploadId: 'fooUploadId',
},
versioningEnabledExpectedRes: {
options: {
versioning: true,
nullVersionId: 'vnull',
nullUploadId: 'fooUploadId',
},
// instruct to first copy the null version onto a
// newly created version key preserving the version ID
storeOptions: {
isNull: true,
versionId: 'vnull',
},
},
versioningSuspendedExpectedRes: {
options: {
isNull: true,
versionId: '',
},
delOptions: {
versionId: 'vnull',
replayId: 'fooUploadId',
},
},
},
{
description:
'prior object exists, put before versioning was first enabled',
objMD: {},
versioningEnabledExpectedRes: {
options: {
versioning: true,
nullVersionId: INF_VID,
},
// instruct to first copy the null version onto a
// newly created version key as the oldest version
storeOptions: {
isNull: true,
versionId: INF_VID,
},
},
versioningSuspendedExpectedRes: {
options: {
isNull: true,
versionId: '',
},
},
},
{
description: 'prior MPU object exists, put before versioning ' +
'was first enabled',
objMD: {
uploadId: 'fooUploadId',
},
versioningEnabledExpectedRes: {
options: {
versioning: true,
nullVersionId: INF_VID,
},
// instruct to first copy the null version onto a
// newly created version key as the oldest version
storeOptions: {
isNull: true,
versionId: INF_VID,
},
},
versioningSuspendedExpectedRes: {
options: {
isNull: true,
versionId: '',
},
},
},
{
description:
'prior non-null object version exists with ref to null version',
objMD: {
versionId: 'v1',
nullVersionId: 'vnull',
},
versioningEnabledExpectedRes: {
options: {
versioning: true,
nullVersionId: 'vnull',
},
},
versioningSuspendedExpectedRes: {
options: {
isNull: true,
versionId: '',
},
delOptions: {
versionId: 'vnull',
},
},
},
{
description: 'prior MPU object non-null version exists with ' +
'ref to null version',
objMD: {
versionId: 'v1',
uploadId: 'fooUploadId',
nullVersionId: 'vnull',
},
versioningEnabledExpectedRes: {
options: {
versioning: true,
nullVersionId: 'vnull',
},
},
versioningSuspendedExpectedRes: {
options: {
isNull: true,
versionId: '',
},
delOptions: {
versionId: 'vnull',
},
},
},
{
description: 'prior object non-null version exists with ' +
'ref to MPU null version',
objMD: {
versionId: 'v1',
nullVersionId: 'vnull',
nullUploadId: 'nullFooUploadId',
},
versioningEnabledExpectedRes: {
options: {
versioning: true,
nullVersionId: 'vnull',
nullUploadId: 'nullFooUploadId',
},
},
versioningSuspendedExpectedRes: {
options: {
isNull: true,
versionId: '',
},
delOptions: {
versionId: 'vnull',
replayId: 'nullFooUploadId',
},
},
},
].forEach(testCase =>
['Enabled', 'Suspended'].forEach(versioningStatus => it(
`${testCase.description}, versioning Status=${versioningStatus}`,
() => {
const mst = getMasterState(testCase.objMD);
// stringify and parse to get rid of the "undefined"
// properties, artifacts of how the function builds the
// result
const res = JSON.parse(
JSON.stringify(
processVersioningState(mst, versioningStatus)
)
);
const expectedRes =
testCase[`versioning${versioningStatus}ExpectedRes`];
assert.deepStrictEqual(res, expectedRes);
})));
});
describe('preprocessingVersioningDelete', () => {
[
{
description: 'no reqVersionId: no delete action',
objMD: {
versionId: 'v1',
},
expectedRes: {},
},
{
description: 'delete non-null object version',
objMD: {
versionId: 'v1',
},
reqVersionId: 'v1',
expectedRes: {
deleteData: true,
versionId: 'v1',
},
},
{
description: 'delete MPU object non-null version',
objMD: {
versionId: 'v1',
uploadId: 'fooUploadId',
},
reqVersionId: 'v1',
expectedRes: {
deleteData: true,
versionId: 'v1',
replayId: 'fooUploadId',
},
},
{
description: 'delete null object version',
objMD: {
versionId: 'vnull',
isNull: true,
},
reqVersionId: 'null',
expectedRes: {
deleteData: true,
versionId: 'vnull',
},
},
{
description: 'delete MPU object null version',
objMD: {
versionId: 'vnull',
isNull: true,
uploadId: 'fooUploadId',
},
reqVersionId: 'null',
expectedRes: {
deleteData: true,
versionId: 'vnull',
replayId: 'fooUploadId',
},
},
{
description:
'delete object put before versioning was first enabled',
objMD: {},
reqVersionId: 'null',
expectedRes: {
deleteData: true,
},
},
{
description:
'delete MPU object put before versioning was first enabled',
objMD: {
uploadId: 'fooUploadId',
},
reqVersionId: 'null',
expectedRes: {
deleteData: true,
},
},
{
description:
'delete non-null object version with ref to null version',
objMD: {
versionId: 'v1',
nullVersionId: 'vnull',
},
reqVersionId: 'v1',
expectedRes: {
deleteData: true,
versionId: 'v1',
},
},
{
description:
'delete MPU object non-null version with ref to null version',
objMD: {
versionId: 'v1',
uploadId: 'fooUploadId',
nullVersionId: 'vnull',
},
reqVersionId: 'v1',
expectedRes: {
deleteData: true,
versionId: 'v1',
replayId: 'fooUploadId',
},
},
{
description:
'delete non-null object version with ref to MPU null version',
objMD: {
versionId: 'v1',
nullVersionId: 'vnull',
nullUploadId: 'nullFooUploadId',
},
reqVersionId: 'v1',
expectedRes: {
deleteData: true,
versionId: 'v1',
},
},
{
description:
'delete null object version from ref to null version',
objMD: {
versionId: 'v1',
nullVersionId: 'vnull',
},
reqVersionId: 'null',
expectedRes: {
deleteData: true,
versionId: 'vnull',
},
},
{
description:
'delete MPU object null version from ref to null version',
objMD: {
versionId: 'v1',
nullVersionId: 'vnull',
nullUploadId: 'nullFooUploadId',
},
reqVersionId: 'null',
expectedRes: {
deleteData: true,
versionId: 'vnull',
replayId: 'nullFooUploadId',
},
},
{
description: 'delete null version that does not exist',
objMD: {
versionId: 'v1',
},
reqVersionId: 'null',
expectedError: errors.NoSuchKey,
},
].forEach(testCase => it(testCase.description, done => {
const mockBucketMD = {
getVersioningConfiguration: () => ({ Status: 'Enabled' }),
};
preprocessingVersioningDelete(
'foobucket', mockBucketMD, testCase.objMD,
testCase.reqVersionId, null, (err, options) => {
if (testCase.expectedError) {
assert.strictEqual(err, testCase.expectedError);
} else {
assert.ifError(err);
assert.deepStrictEqual(options, testCase.expectedRes);
}
done();
});
}));
});
});

View File

@ -12,7 +12,6 @@ const DummyRequest = require('../DummyRequest');
const { errors } = require('arsenal');
const authInfo = makeAuthInfo('accessKey1');
const bucketName = 'bucketname';
const delimiter = '/';
@ -23,7 +22,7 @@ const prefix = 'sub';
const objectName1 = `${prefix}${delimiter}objectName1`;
const objectName2 = `${prefix}${delimiter}objectName2`;
const objectName3 = 'notURIvalid$$';
const objectName3 = 'invalidURI~~~b';
const objectName4 = `${objectName1}&><"\'`;
const testPutBucketRequest = new DummyRequest({
bucketName,
@ -108,6 +107,44 @@ const tests = [
assert.strictEqual(result.ListBucketResult.Contents[1], undefined);
},
},
{
name: 'next token is not url encoded',
request: Object.assign(
{
query: { 'max-keys': '1' },
url: baseUrl,
},
baseGetRequest
),
assertion: result => {
assert.strictEqual(result.ListBucketResult.Contents[0].Key[0],
objectName3);
assert.strictEqual(result.ListBucketResult.Contents[1], undefined);
assert.strictEqual(
result.ListBucketResult.NextContinuationToken[0],
'aW52YWxpZFVSSX5+fmI='
);
},
},
{
name: 'next token is not url encoded even with encoding url enabled',
request: Object.assign(
{
query: { 'encoding-type': 'url', 'max-keys': '1' },
url: baseUrl,
},
baseGetRequest
),
assertion: result => {
assert.strictEqual(result.ListBucketResult.Contents[0].Key[0],
objectName3);
assert.strictEqual(result.ListBucketResult.Contents[1], undefined);
assert.strictEqual(
result.ListBucketResult.NextContinuationToken[0],
'aW52YWxpZFVSSX5+fmI='
);
},
},
{
name: 'return max-keys number from request even if greater than ' +
'actual keys returned',

View File

@ -53,8 +53,7 @@ describe('bucketGetACL API', () => {
(corsHeaders, next) => bucketGetACL(authInfo,
testGetACLRequest, log, next),
(result, corsHeaders, next) => parseString(result, next),
],
(err, result) => {
], (err, result) => {
assert.strictEqual(result.AccessControlPolicy.
AccessControlList[0].Grant[0].Grantee[0]
.ID[0], canonicalID);
@ -86,8 +85,7 @@ describe('bucketGetACL API', () => {
(corsHeaders, next) => bucketGetACL(authInfo, testGetACLRequest,
log, next),
(result, corsHeaders, next) => parseString(result, next),
],
(err, result) => {
], (err, result) => {
assert.strictEqual(result.AccessControlPolicy.
AccessControlList[0].Grant[0].Grantee[0]
.ID[0], canonicalID);
@ -130,8 +128,7 @@ describe('bucketGetACL API', () => {
(corsHeaders, next) => bucketGetACL(authInfo, testGetACLRequest,
log, next),
(result, corsHeaders, next) => parseString(result, next),
],
(err, result) => {
], (err, result) => {
assert.strictEqual(result.AccessControlPolicy.
AccessControlList[0].Grant[0].Grantee[0]
.ID[0], canonicalID);
@ -168,8 +165,7 @@ describe('bucketGetACL API', () => {
(corsHeaders, next) => bucketGetACL(authInfo, testGetACLRequest,
log, next),
(result, corsHeaders, next) => parseString(result, next),
],
(err, result) => {
], (err, result) => {
assert.strictEqual(result.AccessControlPolicy.
AccessControlList[0].Grant[0].Grantee[0]
.ID[0], canonicalID);
@ -207,8 +203,7 @@ describe('bucketGetACL API', () => {
(corsHeaders, next) => bucketGetACL(authInfo, testGetACLRequest,
log, next),
(result, corsHeaders, next) => parseString(result, next),
],
(err, result) => {
], (err, result) => {
assert.strictEqual(result.AccessControlPolicy.
AccessControlList[0].Grant[0].Grantee[0]
.ID[0], canonicalID);
@ -266,8 +261,7 @@ describe('bucketGetACL API', () => {
(corsHeaders, next) => bucketGetACL(authInfo, testGetACLRequest,
log, next),
(result, corsHeaders, next) => parseString(result, next),
],
(err, result) => {
], (err, result) => {
assert.strictEqual(result.AccessControlPolicy.
AccessControlList[0].Grant[0].Grantee[0]
.ID[0], canonicalIDforSample1);
@ -321,4 +315,97 @@ describe('bucketGetACL API', () => {
done();
});
});
const grantsByURI = [
constants.publicId,
constants.allAuthedUsersId,
constants.logId,
];
grantsByURI.forEach(uri => {
it('should get all ACLs when predefined group - ' +
`${uri} is used for multiple grants`, done => {
const testPutACLRequest = {
bucketName,
namespace,
headers: {
'host': `${bucketName}.s3.amazonaws.com`,
'x-amz-grant-full-control': `uri = ${uri}`,
'x-amz-grant-read': `uri = ${uri}`,
'x-amz-grant-write': `uri = ${uri}`,
'x-amz-grant-read-acp': `uri = ${uri}`,
'x-amz-grant-write-acp': `uri = ${uri}`,
},
url: '/?acl',
query: { acl: '' },
};
async.waterfall([
next => bucketPut(authInfo, testBucketPutRequest,
log, next), (corsHeaders, next) =>
bucketPutACL(authInfo, testPutACLRequest, log, next),
(corsHeaders, next) => bucketGetACL(authInfo,
testGetACLRequest, log, next),
(result, corsHeaders, next) => parseString(result, next),
], (err, result) => {
assert.ifError(err);
const grants =
result.AccessControlPolicy.AccessControlList[0].Grant;
grants.forEach(grant => {
assert.strictEqual(grant.Permission.length, 1);
assert.strictEqual(grant.Grantee.length, 1);
assert.strictEqual(grant.Grantee[0].URI.length, 1);
assert.strictEqual(grant.Grantee[0].URI[0], `${uri}`);
});
done();
});
});
});
it('should get all ACLs when predefined groups are used for ' +
'more than one grant', done => {
const { allAuthedUsersId, publicId } = constants;
const testPutACLRequest = {
bucketName,
namespace,
headers: {
'host': `${bucketName}.s3.amazonaws.com`,
'x-amz-grant-write': `uri = ${allAuthedUsersId} `,
'x-amz-grant-write-acp': `uri = ${allAuthedUsersId} `,
'x-amz-grant-read': `uri = ${publicId} `,
'x-amz-grant-read-acp': `uri = ${publicId} `,
},
url: '/?acl',
query: { acl: '' },
};
async.waterfall([
next => bucketPut(authInfo, testBucketPutRequest, log, next),
(corsHeaders, next) =>
bucketPutACL(authInfo, testPutACLRequest, log, next),
(corsHeaders, next) => bucketGetACL(authInfo, testGetACLRequest,
log, next),
(result, corsHeaders, next) => parseString(result, next),
], (err, result) => {
assert.ifError(err);
const grants =
result.AccessControlPolicy.AccessControlList[0].Grant;
grants.forEach(grant => {
const permissions = grant.Permission;
assert.strictEqual(permissions.length, 1);
const permission = permissions[0];
assert.strictEqual(grant.Grantee.length, 1);
const grantees = grant.Grantee[0].URI;
assert.strictEqual(grantees.length, 1);
const grantee = grantees[0];
if (['WRITE', 'WRITE_ACP'].includes(permission)) {
assert.strictEqual(grantee, constants.allAuthedUsersId);
}
if (['READ', 'READ_ACP'].includes(permission)) {
assert.strictEqual(grantee, constants.publicId);
}
});
done();
});
});
});

View File

@ -518,6 +518,7 @@ describe('Multipart Upload API', () => {
assert(MD);
assert.strictEqual(MD['x-amz-meta-stuff'],
'I am some user metadata');
assert.strictEqual(MD.uploadId, testUploadId);
done();
});
});
@ -1754,12 +1755,14 @@ describe('complete mpu with versioning', () => {
versioningTestUtils.createBucketPutVersioningReq(bucketName, 'Enabled');
const suspendVersioningRequest = versioningTestUtils
.createBucketPutVersioningReq(bucketName, 'Suspended');
const testPutObjectRequests = objData.slice(0, 2).map(data =>
versioningTestUtils.createPutObjectRequest(bucketName, objectKey,
data));
let testPutObjectRequests;
beforeEach(done => {
cleanup();
testPutObjectRequests = objData
.slice(0, 2)
.map(data => versioningTestUtils.createPutObjectRequest(
bucketName, objectKey, data));
bucketPut(authInfo, bucketPutRequest, log, done);
});
@ -1769,7 +1772,58 @@ describe('complete mpu with versioning', () => {
});
it('should delete null version when creating new null version, ' +
'even when null version is not the latest version', done => {
'when null version is the latest version', done => {
async.waterfall([
next => bucketPutVersioning(authInfo,
suspendVersioningRequest, log, err => next(err)),
next => initiateMultipartUpload(
authInfo, initiateRequest, log, next),
(result, corsHeaders, next) => parseString(result, next),
(json, next) => {
const partBody = objData[2];
const testUploadId =
json.InitiateMultipartUploadResult.UploadId[0];
const partRequest = _createPutPartRequest(testUploadId, 1,
partBody);
objectPutPart(authInfo, partRequest, undefined, log,
(err, eTag) => next(err, eTag, testUploadId));
},
(eTag, testUploadId, next) => {
const origPutObject = metadataBackend.putObject;
metadataBackend.putObject =
(bucketName, objName, objVal, params, log, cb) => {
assert.strictEqual(params.replayId, testUploadId);
metadataBackend.putObject = origPutObject;
metadataBackend.putObject(
bucketName, objName, objVal, params, log, cb);
};
const parts = [{ partNumber: 1, eTag }];
const completeRequest = _createCompleteMpuRequest(testUploadId,
parts);
completeMultipartUpload(authInfo, completeRequest, log,
err => next(err, testUploadId));
},
(testUploadId, next) => {
const origDeleteObject = metadataBackend.deleteObject;
metadataBackend.deleteObject =
(bucketName, objName, params, log, cb) => {
assert.strictEqual(params.replayId, testUploadId);
metadataBackend.deleteObject = origDeleteObject;
metadataBackend.deleteObject(
bucketName, objName, params, log, cb);
};
// overwrite null version with a non-MPU object
objectPut(authInfo, testPutObjectRequests[1],
undefined, log, err => next(err));
},
], err => {
assert.ifError(err, `Unexpected err: ${err}`);
done();
});
});
it('should delete null version when creating new null version, ' +
'when null version is not the latest version', done => {
async.waterfall([
// putting null version: put obj before versioning configured
next => objectPut(authInfo, testPutObjectRequests[0],
@ -1797,18 +1851,39 @@ describe('complete mpu with versioning', () => {
(err, eTag) => next(err, eTag, testUploadId));
},
(eTag, testUploadId, next) => {
const origPutObject = metadataBackend.putObject;
metadataBackend.putObject =
(bucketName, objName, objVal, params, log, cb) => {
assert.strictEqual(params.replayId, testUploadId);
metadataBackend.putObject = origPutObject;
metadataBackend.putObject(
bucketName, objName, objVal, params, log, cb);
};
const parts = [{ partNumber: 1, eTag }];
const completeRequest = _createCompleteMpuRequest(testUploadId,
parts);
completeMultipartUpload(authInfo, completeRequest, log, next);
completeMultipartUpload(authInfo, completeRequest, log,
err => next(err, testUploadId));
},
(testUploadId, next) => {
versioningTestUtils.assertDataStoreValues(
ds, [undefined, objData[1], objData[2]]);
const origDeleteObject = metadataBackend.deleteObject;
metadataBackend.deleteObject =
(bucketName, objName, params, log, cb) => {
assert.strictEqual(params.replayId, testUploadId);
metadataBackend.deleteObject = origDeleteObject;
metadataBackend.deleteObject(
bucketName, objName, params, log, cb);
};
// overwrite null version with a non-MPU object
objectPut(authInfo, testPutObjectRequests[1],
undefined, log, err => next(err));
},
], err => {
assert.ifError(err, `Unexpected err: ${err}`);
process.nextTick(() => {
versioningTestUtils.assertDataStoreValues(ds, [undefined,
objData[1], objData[2]]);
done(err);
});
done();
});
});

View File

@ -1,5 +1,8 @@
const assert = require('assert');
const async = require('async');
const crypto = require('crypto');
const { errors } = require('arsenal');
const xml2js = require('xml2js');
const { bucketPut } = require('../../../lib/api/bucketPut');
const bucketPutACL = require('../../../lib/api/bucketPutACL');
@ -8,6 +11,11 @@ const { cleanup, DummyRequestLogger, makeAuthInfo } = require('../helpers');
const objectPut = require('../../../lib/api/objectPut');
const objectDelete = require('../../../lib/api/objectDelete');
const objectGet = require('../../../lib/api/objectGet');
const initiateMultipartUpload
= require('../../../lib/api/initiateMultipartUpload');
const objectPutPart = require('../../../lib/api/objectPutPart');
const completeMultipartUpload
= require('../../../lib/api/completeMultipartUpload');
const DummyRequest = require('../DummyRequest');
const log = new DummyRequestLogger();
@ -69,6 +77,14 @@ describe('objectDelete API', () => {
url: `/${bucketName}/${objectKey}`,
});
const initiateMPURequest = {
bucketName,
namespace,
objectKey,
headers: { host: `${bucketName}.s3.amazonaws.com` },
url: `/${objectKey}?uploads`,
};
it('should delete an object', done => {
bucketPut(authInfo, testBucketPutRequest, log, () => {
objectPut(authInfo, testPutObjectRequest,
@ -112,6 +128,57 @@ describe('objectDelete API', () => {
});
});
it('should delete a multipart upload', done => {
const partBody = Buffer.from('I am a part\n', 'utf8');
let testUploadId;
let calculatedHash;
async.waterfall([
next => bucketPut(authInfo, testBucketPutRequest, log, next),
(corsHeaders, next) => initiateMultipartUpload(authInfo,
initiateMPURequest, log, next),
(result, corsHeaders, next) => xml2js.parseString(result, next),
(json, next) => {
testUploadId = json.InitiateMultipartUploadResult.UploadId[0];
const md5Hash = crypto.createHash('md5').update(partBody);
calculatedHash = md5Hash.digest('hex');
const partRequest = new DummyRequest({
bucketName,
namespace,
objectKey,
headers: { host: `${bucketName}.s3.amazonaws.com` },
url: `/${objectKey}?partNumber=1&uploadId=${testUploadId}`,
query: {
partNumber: '1',
uploadId: testUploadId,
},
calculatedHash,
}, partBody);
objectPutPart(authInfo, partRequest, undefined, log, next);
},
(hexDigest, corsHeaders, next) => {
const completeBody = '<CompleteMultipartUpload>' +
'<Part>' +
'<PartNumber>1</PartNumber>' +
`<ETag>"${calculatedHash}"</ETag>` +
'</Part>' +
'</CompleteMultipartUpload>';
const completeRequest = {
bucketName,
namespace,
objectKey,
parsedHost: 's3.amazonaws.com',
url: `/${objectKey}?uploadId=${testUploadId}`,
headers: { host: `${bucketName}.s3.amazonaws.com` },
query: { uploadId: testUploadId },
post: completeBody,
};
completeMultipartUpload(authInfo, completeRequest, log, next);
},
(result, resHeaders, next) =>
objectDelete(authInfo, testDeleteRequest, log, next),
], done);
});
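// Editor's note: the complete-MPU body above is hand-built for a single
// part; a small hypothetical helper generalizing it to any part list
// (entries assumed to carry { partNumber, eTag }), shown for clarity only:
function buildCompleteMpuBody(parts) {
    const partsXml = parts.map(p =>
        `<Part><PartNumber>${p.partNumber}</PartNumber>` +
        `<ETag>"${p.eTag}"</ETag></Part>`).join('');
    return `<CompleteMultipartUpload>${partsXml}</CompleteMultipartUpload>`;
}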
it('should prevent anonymous user deleteObject API access', done => {
const publicAuthInfo = makeAuthInfo(constants.publicId);
bucketPut(authInfo, testBucketPutRequest, log, () => {

View File

@ -64,8 +64,7 @@ describe('objectGetACL API', () => {
objectGetACL(authInfo, testGetACLRequest, log, next);
},
(result, corsHeaders, next) => parseString(result, next),
], (err, result) => {
assert.strictEqual(result.AccessControlPolicy.
AccessControlList[0].Grant[0].Grantee[0]
.ID[0], canonicalID);
@ -103,8 +102,7 @@ describe('objectGetACL API', () => {
objectGetACL(authInfo, testGetACLRequest, log, next);
},
(result, corsHeaders, next) => parseString(result, next),
], (err, result) => {
assert.strictEqual(result.AccessControlPolicy.
AccessControlList[0].Grant[0].Grantee[0]
.ID[0], canonicalID);
@ -139,8 +137,7 @@ describe('objectGetACL API', () => {
objectGetACL(authInfo, testGetACLRequest, log, next);
},
(result, corsHeaders, next) => parseString(result, next),
], (err, result) => {
assert.strictEqual(result.AccessControlPolicy.
AccessControlList[0].Grant[0].Grantee[0]
.ID[0], canonicalID);
@ -182,8 +179,7 @@ describe('objectGetACL API', () => {
objectGetACL(authInfo, testGetACLRequest, log, next);
},
(result, corsHeaders, next) => parseString(result, next),
], (err, result) => {
assert.strictEqual(result.AccessControlPolicy.
AccessControlList[0].Grant[0].Grantee[0]
.ID[0], canonicalID);
@ -223,8 +219,7 @@ describe('objectGetACL API', () => {
objectGetACL(authInfo, testGetACLRequest, log, next);
},
(result, corsHeaders, next) => parseString(result, next),
], (err, result) => {
assert.strictEqual(result.AccessControlPolicy.
AccessControlList[0].Grant[0].Grantee[0]
.ID[0], canonicalID);
@ -263,8 +258,7 @@ describe('objectGetACL API', () => {
objectGetACL(authInfo, testGetACLRequest, log, next);
},
(result, corsHeaders, next) => parseString(result, next),
], (err, result) => {
assert.strictEqual(result.AccessControlPolicy.
AccessControlList[0].Grant[0].Grantee[0]
.ID[0], canonicalID);
@ -313,8 +307,7 @@ describe('objectGetACL API', () => {
objectGetACL(authInfo, testGetACLRequest, log, next);
},
(result, corsHeaders, next) => parseString(result, next),
], (err, result) => {
assert.strictEqual(result.AccessControlPolicy.
AccessControlList[0].Grant[0].Grantee[0]
.ID[0], '79a59df900b949e55d96a1e698fbacedfd6e09d98' +
@ -367,4 +360,49 @@ describe('objectGetACL API', () => {
done();
});
});
const grantsByURI = [
constants.publicId,
constants.allAuthedUsersId,
];
grantsByURI.forEach(uri => {
it('should get all ACLs when predefined group - ' +
`${uri} is used for multiple grants`, done => {
const testPutObjectRequest = new DummyRequest({
bucketName,
namespace,
objectKey: objectName,
headers: {
'x-amz-grant-full-control': `uri=${uri}`,
'x-amz-grant-read': `uri=${uri}`,
'x-amz-grant-read-acp': `uri=${uri}`,
'x-amz-grant-write-acp': `uri=${uri}`,
},
url: `/${bucketName}/${objectName}`,
}, postBody);
async.waterfall([
next => bucketPut(authInfo, testBucketPutRequest,
log, next),
(corsHeaders, next) => objectPut(authInfo,
testPutObjectRequest, undefined, log, next),
(resHeaders, next) => {
assert.strictEqual(resHeaders.ETag, `"${correctMD5}"`);
objectGetACL(authInfo, testGetACLRequest, log, next);
},
(result, corsHeaders, next) => parseString(result, next),
], (err, result) => {
assert.ifError(err);
const grants =
result.AccessControlPolicy.AccessControlList[0].Grant;
grants.forEach(grant => {
assert.strictEqual(grant.Permission.length, 1);
assert.strictEqual(grant.Grantee.length, 1);
assert.strictEqual(grant.Grantee[0].URI.length, 1);
assert.strictEqual(grant.Grantee[0].URI[0], `${uri}`);
});
done();
});
});
});
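// Editor's note: the pervasive [0] indexing in the assertions above
// reflects xml2js's default `explicitArray: true` behavior, where every
// child element is parsed into an array. A minimal illustration (shape
// assumed from the assertions, not verified against this exact fixture):
// parseString('<A><B>x</B></A>', (err, res) => {
//     assert.deepStrictEqual(res, { A: { B: ['x'] } });
// });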
});

View File

@ -282,9 +282,9 @@ arraybuffer.slice@~0.0.7:
resolved "https://registry.yarnpkg.com/arraybuffer.slice/-/arraybuffer.slice-0.0.7.tgz#3bbc4275dd584cc1b10809b89d4e8b63a69e7675"
integrity sha512-wGUIVQXuehL5TCqQun8OW81jGzAWycqzFF8lFp+GOM5BXLYj3bKNsYC4daB7n6XjCqxQA/qgTJ+8ANR3acjrog==
"arsenal@github:scality/Arsenal#f17006b":
version "7.5.0"
resolved "https://codeload.github.com/scality/Arsenal/tar.gz/f17006b91eaefce2be00c2600ae55e4d63265333"
"arsenal@github:scality/Arsenal#f4f867d":
version "7.4.13"
resolved "https://codeload.github.com/scality/Arsenal/tar.gz/f4f867d9fbe3b7c4be5ca95a179b5d7c19ffe784"
dependencies:
"@hapi/joi" "^15.1.0"
JSONStream "^1.0.0"
@ -298,6 +298,7 @@ arraybuffer.slice@~0.0.7:
level "~5.0.1"
level-sublevel "~6.6.5"
node-forge "^0.7.1"
prom-client "10.2.3"
simple-glob "^0.2"
socket.io "~2.3.0"
socket.io-client "~2.3.0"
@ -624,6 +625,11 @@ bindings@^1.1.1:
dependencies:
file-uri-to-path "1.0.0"
bintrees@1.0.1:
version "1.0.1"
resolved "https://registry.yarnpkg.com/bintrees/-/bintrees-1.0.1.tgz#0e655c9b9c2435eaab68bf4027226d2b55a34524"
integrity sha1-DmVcm5wkNeqraL9AJyJtK1WjRSQ=
bl@^2.2.1:
version "2.2.1"
resolved "https://registry.yarnpkg.com/bl/-/bl-2.2.1.tgz#8c11a7b730655c5d56898cdc871224f40fd901d5"
@ -3139,6 +3145,13 @@ progress@^1.1.8:
resolved "https://registry.yarnpkg.com/progress/-/progress-1.1.8.tgz#e260c78f6161cdd9b0e56cc3e0a85de17c7a57be"
integrity sha1-4mDHj2Fhzdmw5WzD4Khd4Xx6V74=
prom-client@10.2.3:
version "10.2.3"
resolved "https://registry.yarnpkg.com/prom-client/-/prom-client-10.2.3.tgz#a51bf21c239c954a6c5be4b1361fdd380218bb41"
integrity sha512-Xboq5+TdUwuQtSSDRZRNnb5NprINlgQN999VqUjZxnLKydUNLeIPx6Eiahg6oJua3XBg2TGnh5Cth1s4I6+r7g==
dependencies:
tdigest "^0.1.1"
prr@~0.0.0:
version "0.0.0"
resolved "https://registry.yarnpkg.com/prr/-/prr-0.0.0.tgz#1a84b85908325501411853d0081ee3fa86e2926a"
@ -3860,6 +3873,13 @@ table@^3.7.8:
slice-ansi "0.0.4"
string-width "^2.0.0"
tdigest@^0.1.1:
version "0.1.1"
resolved "https://registry.yarnpkg.com/tdigest/-/tdigest-0.1.1.tgz#2e3cb2c39ea449e55d1e6cd91117accca4588021"
integrity sha1-Ljyyw56kSeVdHmzZEReszKRYgCE=
dependencies:
bintrees "1.0.1"
text-table@~0.2.0:
version "0.2.0"
resolved "https://registry.yarnpkg.com/text-table/-/text-table-0.2.0.tgz#7f5ee823ae805207c00af2df4a84ec3fcfa570b4"