Compare commits
18 Commits
developmen
...
improvemen
Author | SHA1 | Date |
---|---|---|
Maha Benzekri | 893ea7575b | |
Maha Benzekri | 6bc82943e3 | |
Maha Benzekri | a67ea27f65 | |
Will Toozs | 9e63dfc491 | |
Maha Benzekri | 46bdcf5af1 | |
Will Toozs | aee828749a | |
Will Toozs | 04f6f317af | |
Maha Benzekri | fd691c6493 | |
Will Toozs | 4b3983fb78 | |
Maha Benzekri | 33d7c99e0c | |
Maha Benzekri | 6fb225eb5c | |
Maha Benzekri | a7396a721c | |
Will Toozs | 7aa326cba9 | |
Will Toozs | 10a0672b68 | |
Will Toozs | 698f5a44d4 | |
Maha Benzekri | 46ce0a9082 | |
Maha Benzekri | 9712ebd12a | |
Will Toozs | 2b0eb9ddd1 | |
@@ -153,6 +153,8 @@ const constants = {
        'objectDeleteTagging',
        'objectGetTagging',
        'objectPutTagging',
        'objectPutLegalHold',
        'objectPutRetention',
    ],
    // response header to be sent when there are invalid
    // user metadata in the object's metadata

@@ -174,6 +176,11 @@ const constants = {
        'user',
        'bucket',
    ],
    arrayOfAllowed: [
        'objectPutTagging',
        'objectPutLegalHold',
        'objectPutRetention',
    ],
    allowedUtapiEventFilterStates: ['allow', 'deny'],
    // The AWS assumed Role resource type
    assumedRoleArnResourceType: 'assumed-role',

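The new `arrayOfAllowed` constant is the whitelist consulted by the permission checks later in this diff: it lists the sub-actions that may accompany an `objectPut` main call. A minimal, self-contained illustration of the intended lookup (values copied from the hunk above; the surrounding usage is simplified and not the project's actual check):

```js
// Sub-actions allowed to ride along with an objectPut main API call
// (values copied from the constants hunk above).
const arrayOfAllowed = ['objectPutTagging', 'objectPutLegalHold', 'objectPutRetention'];

// The permission check is a simple membership test:
console.log(arrayOfAllowed.includes('objectPutTagging')); // true
console.log(arrayOfAllowed.includes('objectGetTagging')); // false
```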
@@ -107,6 +107,7 @@ const api = {
        // no need to check auth on website or cors preflight requests
        if (apiMethod === 'websiteGet' || apiMethod === 'websiteHead' ||
            apiMethod === 'corsPreflight') {
            request.actionImplicitDenies = false;
            return this[apiMethod](request, log, callback);
        }

@@ -129,15 +130,25 @@ const api = {
        const requestContexts = prepareRequestContexts(apiMethod, request,
            sourceBucket, sourceObject, sourceVersionId);
        // Extract all the _apiMethods and store them in an array
        const apiMethods = requestContexts ? requestContexts.map(context => context._apiMethod) : [];
        // Attach the names to the current request
        // eslint-disable-next-line no-param-reassign
        request.apiMethods = apiMethods;

        function checkAuthResults(authResults) {
            let returnTagCount = true;
            const isImplicitDeny = {};
            let isOnlyImplicitDeny = true;
            if (apiMethod === 'objectGet') {
                // first item checks s3:GetObject(Version) action
                if (!authResults[0].isAllowed) {
                if (!authResults[0].isAllowed && !authResults[0].isImplicit) {
                    log.trace('get object authorization denial from Vault');
                    return errors.AccessDenied;
                }
                // TODO add support for returnTagCount in the bucket policy
                // checks
                isImplicitDeny[authResults[0].action] = authResults[0].isImplicit;
                // second item checks s3:GetObject(Version)Tagging action
                if (!authResults[1].isAllowed) {
                    log.trace('get tagging authorization denial ' +

@@ -146,13 +157,26 @@ const api = {
                }
            } else {
                for (let i = 0; i < authResults.length; i++) {
                    if (!authResults[i].isAllowed) {
                        isImplicitDeny[authResults[i].action] = true;
                    if (!authResults[i].isAllowed && !authResults[i].isImplicit) {
                        // Any explicit deny rejects the current API call
                        log.trace('authorization denial from Vault');
                        return errors.AccessDenied;
                    }
                    if (authResults[i].isAllowed) {
                        // If the action is allowed, the result is not implicit
                        // Deny.
                        isImplicitDeny[authResults[i].action] = false;
                        isOnlyImplicitDeny = false;
                    }
                }
            }
            return returnTagCount;
            // These two APIs cannot use ACLs or Bucket Policies, hence, any
            // implicit deny from vault must be treated as an explicit deny.
            if ((apiMethod === 'bucketPut' || apiMethod === 'serviceGet') && isOnlyImplicitDeny) {
                return errors.AccessDenied;
            }
            return { returnTagCount, isImplicitDeny };
        }

        return async.waterfall([

@@ -230,7 +254,16 @@ const api = {
            if (checkedResults instanceof Error) {
                return callback(checkedResults);
            }
            returnTagCount = checkedResults;
            returnTagCount = checkedResults.returnTagCount;
            request.actionImplicitDenies = checkedResults.isImplicitDeny;
        } else {
            // create an object of keys apiMethods with all values to false:
            // for backward compatibility, all apiMethods are allowed by default
            // thus it is explicitly allowed, so implicit deny is false
            request.actionImplicitDenies = apiMethods.reduce((acc, curr) => {
                acc[curr] = false;
                return acc;
            }, {});
        }
        if (apiMethod === 'objectPut' || apiMethod === 'objectPutPart') {
            request._response = response;

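To follow the new flow in `checkAuthResults` above: each auth-backend result carries `isAllowed` plus an `isImplicit` flag for denials; an explicit deny aborts the call, while implicit denies are only recorded per action so the ACL and bucket-policy checks can weigh in later. A standalone sketch of that folding, with invented action names and results (not the project's actual modules):

```js
// Hypothetical per-action results as an auth backend such as Vault might
// return them: one entry per request context.
const authResults = [
    { action: 'objectPut', isAllowed: true, isImplicit: false },
    { action: 'objectPutTagging', isAllowed: false, isImplicit: true },
];

// Fold them into the per-action map the diff attaches to
// `request.actionImplicitDenies`.
const actionImplicitDenies = {};
for (const result of authResults) {
    if (!result.isAllowed && !result.isImplicit) {
        // An explicit deny rejects the whole API call immediately.
        throw new Error('AccessDenied');
    }
    // Otherwise remember whether this action was implicitly denied.
    actionImplicitDenies[result.action] = !result.isAllowed;
}

console.log(actionImplicitDenies);
// { objectPut: false, objectPutTagging: true }
```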
@@ -1,20 +1,35 @@
const { evaluators, actionMaps, RequestContext } = require('arsenal').policies;
const constants = require('../../../../constants');

const { allAuthedUsersId, bucketOwnerActions, logId, publicId,
    assumedRoleArnResourceType, backbeatLifecycleSessionName } = constants;
const {
    allAuthedUsersId, bucketOwnerActions, logId, publicId,
    assumedRoleArnResourceType, backbeatLifecycleSessionName, arrayOfAllowed,
} = constants;

// whitelist buckets to allow public read on objects
const publicReadBuckets = process.env.ALLOW_PUBLIC_READ_BUCKETS ?
    process.env.ALLOW_PUBLIC_READ_BUCKETS.split(',') : [];
const publicReadBuckets = process.env.ALLOW_PUBLIC_READ_BUCKETS
    ? process.env.ALLOW_PUBLIC_READ_BUCKETS.split(',') : [];

function checkBucketAcls(bucket, requestType, canonicalID) {
function checkBucketAcls(bucket, requestType, canonicalID, mainApiCall) {
    // Same logic applies on the Versioned APIs, so let's simplify it.
    const requestTypeParsed = requestType.endsWith('Version')
        ? requestType.slice(0, -7) : requestType;
    if (bucket.getOwner() === canonicalID) {
        return true;
    }
    if (mainApiCall === 'objectGet') {
        if (requestTypeParsed === 'objectGetTagging') {
            return true;
        }
    }
    if (mainApiCall === 'objectPut') {
        if (arrayOfAllowed.includes(requestTypeParsed)) {
            return true;
        }
    }

    const bucketAcl = bucket.getAcl();
    if (requestType === 'bucketGet' || requestType === 'bucketHead') {
    if (requestTypeParsed === 'bucketGet' || requestTypeParsed === 'bucketHead') {
        if (bucketAcl.Canned === 'public-read'
            || bucketAcl.Canned === 'public-read-write'
            || (bucketAcl.Canned === 'authenticated-read'

@@ -32,7 +47,7 @@ function checkBucketAcls(bucket, requestType, canonicalID) {
            return true;
        }
    }
    if (requestType === 'bucketGetACL') {
    if (requestTypeParsed === 'bucketGetACL') {
        if ((bucketAcl.Canned === 'log-delivery-write'
            && canonicalID === logId)
            || bucketAcl.FULL_CONTROL.indexOf(canonicalID) > -1

@@ -48,7 +63,7 @@ function checkBucketAcls(bucket, requestType, canonicalID) {
        }
    }

    if (requestType === 'bucketPutACL') {
    if (requestTypeParsed === 'bucketPutACL') {
        if (bucketAcl.FULL_CONTROL.indexOf(canonicalID) > -1
            || bucketAcl.WRITE_ACP.indexOf(canonicalID) > -1) {
            return true;

@@ -62,11 +77,7 @@ function checkBucketAcls(bucket, requestType, canonicalID) {
        }
    }

    if (requestType === 'bucketDelete' && bucket.getOwner() === canonicalID) {
        return true;
    }

    if (requestType === 'objectDelete' || requestType === 'objectPut') {
    if (requestTypeParsed === 'objectDelete' || requestTypeParsed === 'objectPut') {
        if (bucketAcl.Canned === 'public-read-write'
            || bucketAcl.FULL_CONTROL.indexOf(canonicalID) > -1
            || bucketAcl.WRITE.indexOf(canonicalID) > -1) {

@@ -86,11 +97,12 @@ function checkBucketAcls(bucket, requestType, canonicalID) {
    // objectPutACL, objectGetACL, objectHead or objectGet, the bucket
    // authorization check should just return true so can move on to check
    // rights at the object level.
    return (requestType === 'objectPutACL' || requestType === 'objectGetACL' ||
        requestType === 'objectGet' || requestType === 'objectHead');
    return (requestTypeParsed === 'objectPutACL' || requestTypeParsed === 'objectGetACL'
        || requestTypeParsed === 'objectGet' || requestTypeParsed === 'objectHead');
}

function checkObjectAcls(bucket, objectMD, requestType, canonicalID) {
function checkObjectAcls(bucket, objectMD, requestType, canonicalID, requesterIsNotUser,
    isUserUnauthenticated, mainApiCall) {
    const bucketOwner = bucket.getOwner();
    // acls don't distinguish between users and accounts, so both should be allowed
    if (bucketOwnerActions.includes(requestType)

@@ -100,6 +112,15 @@ function checkObjectAcls(bucket, objectMD, requestType, canonicalID) {
    if (objectMD['owner-id'] === canonicalID) {
        return true;
    }

    // Backward compatibility
    if (mainApiCall === 'objectGet') {
        if ((isUserUnauthenticated || (requesterIsNotUser && bucketOwner === objectMD['owner-id']))
            && requestType === 'objectGetTagging') {
            return true;
        }
    }

    if (!objectMD.acl) {
        return false;
    }

@@ -169,9 +190,9 @@ function checkObjectAcls(bucket, objectMD, requestType, canonicalID) {
    // allow public reads on buckets that are whitelisted for anonymous reads
    // TODO: remove this after bucket policies are implemented
    const bucketAcl = bucket.getAcl();
    const allowPublicReads = publicReadBuckets.includes(bucket.getName()) &&
        bucketAcl.Canned === 'public-read' &&
        (requestType === 'objectGet' || requestType === 'objectHead');
    const allowPublicReads = publicReadBuckets.includes(bucket.getName())
        && bucketAcl.Canned === 'public-read'
        && (requestType === 'objectGet' || requestType === 'objectHead');
    if (allowPublicReads) {
        return true;
    }

@@ -268,75 +289,157 @@ function checkBucketPolicy(policy, requestType, canonicalID, arn, bucketOwner, l
    return permission;
}

function isBucketAuthorized(bucket, requestType, canonicalID, authInfo, log, request) {
    // Check to see if user is authorized to perform a
    // particular action on bucket based on ACLs.
    // TODO: Add IAM checks
    let requesterIsNotUser = true;
    let arn = null;
    if (authInfo) {
        requesterIsNotUser = !authInfo.isRequesterAnIAMUser();
        arn = authInfo.getArn();
    }
    // if the bucket owner is an account, users should not have default access
    if ((bucket.getOwner() === canonicalID) && requesterIsNotUser) {
        return true;
    }
    const aclPermission = checkBucketAcls(bucket, requestType, canonicalID);
function processBucketPolicy(requestType, bucket, canonicalID, arn, bucketOwner, log,
    request, aclPermission, results, actionImplicitDenies) {
    const bucketPolicy = bucket.getBucketPolicy();

    if (!bucketPolicy) {
        return aclPermission;
        // eslint-disable-next-line no-param-reassign
        results[requestType] = actionImplicitDenies[requestType] === false && aclPermission;
    } else {
        const bucketPolicyPermission = checkBucketPolicy(bucketPolicy, requestType, canonicalID, arn,
            bucketOwner, log, request);

        if (bucketPolicyPermission === 'explicitDeny') {
            // eslint-disable-next-line no-param-reassign
            results[requestType] = false;
        } else if (bucketPolicyPermission === 'allow') {
            // eslint-disable-next-line no-param-reassign
            results[requestType] = true;
        } else {
            // eslint-disable-next-line no-param-reassign
            results[requestType] = actionImplicitDenies[requestType] === false && aclPermission;
        }
    }
    const bucketPolicyPermission = checkBucketPolicy(bucketPolicy, requestType,
        canonicalID, arn, bucket.getOwner(), log, request);
    if (bucketPolicyPermission === 'explicitDeny') {
        return false;
    }
    return (aclPermission || (bucketPolicyPermission === 'allow'));
    return results[requestType];
}

function isObjAuthorized(bucket, objectMD, requestType, canonicalID, authInfo, log, request) {
    const bucketOwner = bucket.getOwner();
    if (!objectMD) {
function isBucketAuthorized(bucket, requestTypes, canonicalID, authInfo, log, request, actionImplicitDenies = {}) {
    if (!Array.isArray(requestTypes)) {
        // eslint-disable-next-line no-param-reassign
        requestTypes = [requestTypes];
    }
    if (!actionImplicitDenies) {
        // eslint-disable-next-line no-param-reassign
        actionImplicitDenies = {};
    }
    const mainApiCall = requestTypes[0];
    const results = {};
    return requestTypes.every(_requestType => {
        // By default, all missing actions are defined as allowed from IAM, to be
        // backward compatible
        if (actionImplicitDenies[_requestType] === undefined) {
            // eslint-disable-next-line no-param-reassign
            actionImplicitDenies[_requestType] = false;
        }
        // Check to see if user is authorized to perform a
        // particular action on bucket based on ACLs.
        // TODO: Add IAM checks
        let requesterIsNotUser = true;
        let arn = null;
        if (authInfo) {
            requesterIsNotUser = !authInfo.isRequesterAnIAMUser();
            arn = authInfo.getArn();
        }
        // if the bucket owner is an account, users should not have default access
        if ((bucket.getOwner() === canonicalID) && requesterIsNotUser) {
            results[_requestType] = actionImplicitDenies[_requestType] === false;
            return results[_requestType];
        }
        const aclPermission = checkBucketAcls(bucket, _requestType, canonicalID, mainApiCall);
        return processBucketPolicy(_requestType, bucket, canonicalID, arn, bucket.getOwner(), log,
            request, aclPermission, results, actionImplicitDenies);
    });
}

function evaluateBucketPolicyWithIAM(bucket, requestTypes, canonicalID, authInfo, actionImplicitDenies = {},
    log, request) {
    if (!Array.isArray(requestTypes)) {
        // eslint-disable-next-line no-param-reassign
        requestTypes = [requestTypes];
    }
    if (!actionImplicitDenies) {
        // eslint-disable-next-line no-param-reassign
        actionImplicitDenies = {};
    }
    const results = {};
    return requestTypes.every(_requestType => {
        // By default, all missing actions are defined as allowed from IAM, to be
        // backward compatible
        if (actionImplicitDenies[_requestType] === undefined) {
            // eslint-disable-next-line no-param-reassign
            actionImplicitDenies[_requestType] = false;
        }
        let arn = null;
        if (authInfo) {
            arn = authInfo.getArn();
        }
        return processBucketPolicy(_requestType, bucket, canonicalID, arn, bucket.getOwner(), log,
            request, true, results, actionImplicitDenies);
    });
}

function isObjAuthorized(bucket, objectMD, requestTypes, canonicalID, authInfo, log, request,
    actionImplicitDenies = {}) {
    if (!Array.isArray(requestTypes)) {
        // eslint-disable-next-line no-param-reassign
        requestTypes = [requestTypes];
    }
    if (!actionImplicitDenies) {
        // eslint-disable-next-line no-param-reassign
        actionImplicitDenies = {};
    }
    const results = {};
    const mainApiCall = requestTypes[0];
    return requestTypes.every(_requestType => {
        // By default, all missing actions are defined as allowed from IAM, to be
        // backward compatible
        if (actionImplicitDenies[_requestType] === undefined) {
            // eslint-disable-next-line no-param-reassign
            actionImplicitDenies[_requestType] = false;
        }
        const parsedMethodName = _requestType.endsWith('Version')
            ? _requestType.slice(0, -7) : _requestType;
        const bucketOwner = bucket.getOwner();
        if (!objectMD) {
            // User is already authorized on the bucket for FULL_CONTROL or WRITE or
            // bucket has canned ACL public-read-write
            if (requestType === 'objectPut' || requestType === 'objectDelete') {
                return true;
            if (parsedMethodName === 'objectPut' || parsedMethodName === 'objectDelete') {
                results[_requestType] = actionImplicitDenies[_requestType] === false;
                return results[_requestType];
            }
            // check bucket has read access
            // 'bucketGet' covers listObjects and listMultipartUploads, bucket read actions
            results[_requestType] = isBucketAuthorized(bucket, 'bucketGet', canonicalID, authInfo, log, request);
            return results[_requestType];
        }
        // check bucket has read access
        // 'bucketGet' covers listObjects and listMultipartUploads, bucket read actions
        return isBucketAuthorized(bucket, 'bucketGet', canonicalID, authInfo, log, request);
    }
    let requesterIsNotUser = true;
    let arn = null;
    if (authInfo) {
        requesterIsNotUser = !authInfo.isRequesterAnIAMUser();
        arn = authInfo.getArn();
    }
    if (objectMD['owner-id'] === canonicalID && requesterIsNotUser) {
        return true;
    }
    // account is authorized if:
    // - requesttype is included in bucketOwnerActions and
    // - account is the bucket owner
    // - requester is account, not user
    if (bucketOwnerActions.includes(requestType)
        let requesterIsNotUser = true;
        let arn = null;
        let isUserUnauthenticated = false;
        if (authInfo) {
            requesterIsNotUser = !authInfo.isRequesterAnIAMUser();
            arn = authInfo.getArn();
            isUserUnauthenticated = arn === undefined;
        }
        if (objectMD['owner-id'] === canonicalID && requesterIsNotUser) {
            results[_requestType] = actionImplicitDenies[_requestType] === false;
            return results[_requestType];
        }
        // account is authorized if:
        // - requesttype is included in bucketOwnerActions and
        // - account is the bucket owner
        // - requester is account, not user
        if (bucketOwnerActions.includes(parsedMethodName)
            && (bucketOwner === canonicalID)
            && requesterIsNotUser) {
            return true;
        }
        const aclPermission = checkObjectAcls(bucket, objectMD, requestType,
            canonicalID);
        const bucketPolicy = bucket.getBucketPolicy();
        if (!bucketPolicy) {
            return aclPermission;
        }
        const bucketPolicyPermission = checkBucketPolicy(bucketPolicy, requestType,
            canonicalID, arn, bucket.getOwner(), log, request);
        if (bucketPolicyPermission === 'explicitDeny') {
            return false;
        }
        return (aclPermission || (bucketPolicyPermission === 'allow'));
            results[_requestType] = actionImplicitDenies[_requestType] === false;
            return results[_requestType];
        }
        const aclPermission = checkObjectAcls(bucket, objectMD, parsedMethodName,
            canonicalID, requesterIsNotUser, isUserUnauthenticated, mainApiCall);
        return processBucketPolicy(_requestType, bucket, canonicalID, arn, bucketOwner,
            log, request, aclPermission, results, actionImplicitDenies);
    });
}

function _checkResource(resource, bucketArn) {

@@ -383,9 +486,9 @@ function isLifecycleSession(arn) {
    const resourceType = resourceNames[0];
    const sessionName = resourceNames[resourceNames.length - 1];

    return (service === 'sts' &&
        resourceType === assumedRoleArnResourceType &&
        sessionName === backbeatLifecycleSessionName);
    return (service === 'sts'
        && resourceType === assumedRoleArnResourceType
        && sessionName === backbeatLifecycleSessionName);
}

module.exports = {

@@ -395,4 +498,5 @@ module.exports = {
    checkObjectAcls,
    validatePolicyResource,
    isLifecycleSession,
    evaluateBucketPolicyWithIAM,
};

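The reworked checks above combine, per action, three inputs: the IAM answer carried in `actionImplicitDenies`, the ACL evaluation, and the bucket-policy verdict. A simplified standalone illustration of that combination, mirroring the shape of `processBucketPolicy` (illustrative only, not the module's actual exports):

```js
// One action's final verdict from the three permission sources.
function combine({ policyVerdict, aclAllows, iamImplicitDeny }) {
    if (policyVerdict === 'explicitDeny') {
        return false; // an explicit bucket-policy deny always wins
    }
    if (policyVerdict === 'allow') {
        return true; // an explicit bucket-policy allow also wins
    }
    // No applicable policy statement: fall back to ACLs, but only when IAM
    // did not implicitly deny the action.
    return iamImplicitDeny === false && aclAllows;
}

// Example: IAM implicitly denied objectPutTagging, an ACL would grant it,
// and no bucket-policy statement applies -> access is refused.
console.log(combine({ policyVerdict: 'none', aclAllows: true, iamImplicitDeny: true })); // false
```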
@@ -24,7 +24,7 @@ function _deleteMPUbucket(destinationBucketName, log, cb) {
    });
}

function _deleteOngoingMPUs(authInfo, bucketName, bucketMD, mpus, log, cb) {
function _deleteOngoingMPUs(authInfo, bucketName, bucketMD, mpus, request, log, cb) {
    async.mapLimit(mpus, 1, (mpu, next) => {
        const splitterChar = mpu.key.includes(oldSplitter) ?
            oldSplitter : splitter;

@@ -40,7 +40,7 @@ function _deleteOngoingMPUs(authInfo, bucketName, bucketMD, mpus, log, cb) {
                byteLength: partSizeSum,
            });
            next(err);
        });
        }, request);
    }, cb);
}
/**

@@ -49,11 +49,13 @@ function _deleteOngoingMPUs(authInfo, bucketName, bucketMD, mpus, log, cb) {
 * @param {object} bucketMD - bucket attributes/metadata
 * @param {string} bucketName - bucket in which objectMetadata is stored
 * @param {string} canonicalID - account canonicalID of requester
 * @param {object} request - request object given by router
 * including normalized headers
 * @param {object} log - Werelogs logger
 * @param {function} cb - callback from async.waterfall in bucketDelete
 * @return {undefined}
 */
function deleteBucket(authInfo, bucketMD, bucketName, canonicalID, log, cb) {
function deleteBucket(authInfo, bucketMD, bucketName, canonicalID, request, log, cb) {
    log.trace('deleting bucket from metadata');
    assert.strictEqual(typeof bucketName, 'string');
    assert.strictEqual(typeof canonicalID, 'string');

@@ -100,7 +102,7 @@ function deleteBucket(authInfo, bucketMD, bucketName, canonicalID, log, cb) {
            }
            if (objectsListRes.Contents.length) {
                return _deleteOngoingMPUs(authInfo, bucketName,
                    bucketMD, objectsListRes.Contents, log, err => {
                    bucketMD, objectsListRes.Contents, request, log, err => {
                        if (err) {
                            return next(err);
                        }

@@ -2,7 +2,7 @@ const { errors } = require('arsenal');

const collectCorsHeaders = require('../utilities/collectCorsHeaders');
const deleteBucket = require('./apiUtils/bucket/bucketDeletion');
const { metadataValidateBucket } = require('../metadata/metadataUtils');
const { metadataValidateBucketNew } = require('../metadata/metadataUtils');
const { pushMetric } = require('../utapi/utilities');

/**

@@ -27,11 +27,11 @@ function bucketDelete(authInfo, request, log, cb) {
    const metadataValParams = {
        authInfo,
        bucketName,
        requestType: 'bucketDelete',
        requestType: request.apiMethods || 'bucketDelete',
        request,
    };

    return metadataValidateBucket(metadataValParams, log,
    return metadataValidateBucketNew(metadataValParams, request.actionImplicitDenies, log,
        (err, bucketMD) => {
            const corsHeaders = collectCorsHeaders(request.headers.origin,
                request.method, bucketMD);

@@ -43,7 +43,7 @@ function bucketDelete(authInfo, request, log, cb) {
            log.trace('passed checks',
                { method: 'metadataValidateBucket' });
            return deleteBucket(authInfo, bucketMD, bucketName,
                authInfo.getCanonicalID(), log, err => {
                authInfo.getCanonicalID(), request, log, err => {
                    if (err) {
                        return cb(err, corsHeaders);
                    }

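The same calling pattern recurs in every handler below: `requestType` falls back from the per-action list attached by the API dispatcher to the single legacy string, and the implicit-deny map is forwarded into metadata validation. A condensed sketch of that pattern, with handler details omitted (`metadataValidateBucketNew` and its require path are the ones used throughout this diff; the handler name is illustrative):

```js
const { metadataValidateBucketNew } = require('../metadata/metadataUtils');

// Condensed version of the recurring handler pattern in this diff.
function bucketDeleteLikeHandler(authInfo, request, log, callback) {
    const metadataValParams = {
        authInfo,
        bucketName: request.bucketName,
        // Prefer the per-action list set by api.js; fall back to the legacy
        // single action string when the request did not go through it.
        requestType: request.apiMethods || 'bucketDelete',
        request,
    };
    // The implicit-deny map travels alongside so the ACL/policy checks can
    // honour IAM's per-action answer.
    return metadataValidateBucketNew(metadataValParams, request.actionImplicitDenies,
        log, callback);
}
```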
@@ -33,7 +33,8 @@ function bucketDeleteCors(authInfo, request, log, callback) {
        }
        log.trace('found bucket in metadata');

        if (!isBucketAuthorized(bucket, requestType, canonicalID, authInfo, log, request)) {
        if (!isBucketAuthorized(bucket, requestType, canonicalID, authInfo,
            log, request, request.actionImplicitDenies)) {
            log.debug('access denied for user on bucket', {
                requestType,
                method: 'bucketDeleteCors',

@@ -1,7 +1,7 @@
const async = require('async');

const metadata = require('../metadata/wrapper');
const { metadataValidateBucket } = require('../metadata/metadataUtils');
const { metadataValidateBucketNew } = require('../metadata/metadataUtils');
const { pushMetric } = require('../utapi/utilities');
const collectCorsHeaders = require('../utilities/collectCorsHeaders');
const { checkExpectedBucketOwner } = require('./apiUtils/authorization/bucketOwner');

@@ -21,12 +21,12 @@ function bucketDeleteEncryption(authInfo, request, log, callback) {
    const metadataValParams = {
        authInfo,
        bucketName,
        requestType: 'bucketDeleteEncryption',
        requestType: request.apiMethods || 'bucketDeleteEncryption',
        request,
    };

    return async.waterfall([
        next => metadataValidateBucket(metadataValParams, log, next),
        next => metadataValidateBucketNew(metadataValParams, request.actionImplicitDenies, log, next),
        (bucket, next) => checkExpectedBucketOwner(request.headers, bucket, log, err => next(err, bucket)),
        (bucket, next) => {
            const sseConfig = bucket.getServerSideEncryption();

@@ -1,5 +1,5 @@
const metadata = require('../metadata/wrapper');
const { metadataValidateBucket } = require('../metadata/metadataUtils');
const { metadataValidateBucketNew } = require('../metadata/metadataUtils');
const { pushMetric } = require('../utapi/utilities');
const collectCorsHeaders = require('../utilities/collectCorsHeaders');

@@ -17,10 +17,10 @@ function bucketDeleteLifecycle(authInfo, request, log, callback) {
    const metadataValParams = {
        authInfo,
        bucketName,
        requestType: 'bucketDeleteLifecycle',
        requestType: request.apiMethods || 'bucketDeleteLifecycle',
        request,
    };
    return metadataValidateBucket(metadataValParams, log, (err, bucket) => {
    return metadataValidateBucketNew(metadataValParams, request.actionImplicitDenies, log, (err, bucket) => {
        const corsHeaders = collectCorsHeaders(headers.origin, method, bucket);
        if (err) {
            log.debug('error processing request', {

@@ -1,5 +1,5 @@
const metadata = require('../metadata/wrapper');
const { metadataValidateBucket } = require('../metadata/metadataUtils');
const { metadataValidateBucketNew } = require('../metadata/metadataUtils');
const collectCorsHeaders = require('../utilities/collectCorsHeaders');

/**

@@ -16,10 +16,10 @@ function bucketDeletePolicy(authInfo, request, log, callback) {
    const metadataValParams = {
        authInfo,
        bucketName,
        requestType: 'bucketDeletePolicy',
        requestType: request.apiMethods || 'bucketDeletePolicy',
        request,
    };
    return metadataValidateBucket(metadataValParams, log, (err, bucket) => {
    return metadataValidateBucketNew(metadataValParams, request.actionImplicitDenies, log, (err, bucket) => {
        const corsHeaders = collectCorsHeaders(headers.origin, method, bucket);
        if (err) {
            log.debug('error processing request', {

@@ -1,5 +1,5 @@
const metadata = require('../metadata/wrapper');
const { metadataValidateBucket } = require('../metadata/metadataUtils');
const { metadataValidateBucketNew } = require('../metadata/metadataUtils');
const { pushMetric } = require('../utapi/utilities');
const collectCorsHeaders = require('../utilities/collectCorsHeaders');

@@ -17,10 +17,10 @@ function bucketDeleteReplication(authInfo, request, log, callback) {
    const metadataValParams = {
        authInfo,
        bucketName,
        requestType: 'bucketDeleteReplication',
        requestType: request.apiMethods || 'bucketDeleteReplication',
        request,
    };
    return metadataValidateBucket(metadataValParams, log, (err, bucket) => {
    return metadataValidateBucketNew(metadataValParams, request.actionImplicitDenies, log, (err, bucket) => {
        const corsHeaders = collectCorsHeaders(headers.origin, method, bucket);
        if (err) {
            log.debug('error processing request', {

@@ -25,7 +25,8 @@ function bucketDeleteWebsite(authInfo, request, log, callback) {
        }
        log.trace('found bucket in metadata');

        if (!isBucketAuthorized(bucket, requestType, canonicalID, authInfo, log, request)) {
        if (!isBucketAuthorized(bucket, requestType, canonicalID, authInfo,
            log, request, request.actionImplicitDenies)) {
            log.debug('access denied for user on bucket', {
                requestType,
                method: 'bucketDeleteWebsite',

@@ -3,7 +3,7 @@ const { errors, versioning, s3middleware } = require('arsenal');

const constants = require('../../constants');
const services = require('../services');
const { metadataValidateBucket } = require('../metadata/metadataUtils');
const { metadataValidateBucketNew } = require('../metadata/metadataUtils');
const collectCorsHeaders = require('../utilities/collectCorsHeaders');
const escapeForXml = s3middleware.escapeForXml;
const { pushMetric } = require('../utapi/utilities');

@@ -345,7 +345,7 @@ function bucketGet(authInfo, request, log, callback) {
        listParams.marker = params.marker;
    }

    metadataValidateBucket(metadataValParams, log, (err, bucket) => {
    metadataValidateBucketNew(metadataValParams, request.actionImplicitDenies, log, (err, bucket) => {
        const corsHeaders = collectCorsHeaders(request.headers.origin,
            request.method, bucket);
        if (err) {

@@ -1,5 +1,5 @@
const aclUtils = require('../utilities/aclUtils');
const { metadataValidateBucket } = require('../metadata/metadataUtils');
const { metadataValidateBucketNew } = require('../metadata/metadataUtils');
const vault = require('../auth/vault');
const collectCorsHeaders = require('../utilities/collectCorsHeaders');
const { pushMetric } = require('../utapi/utilities');

@@ -54,7 +54,7 @@ function bucketGetACL(authInfo, request, log, callback) {
        },
    };

    metadataValidateBucket(metadataValParams, log, (err, bucket) => {
    metadataValidateBucketNew(metadataValParams, request.actionImplicitDenies, log, (err, bucket) => {
        const corsHeaders = collectCorsHeaders(request.headers.origin,
            request.method, bucket);
        if (err) {

@@ -34,7 +34,8 @@ function bucketGetCors(authInfo, request, log, callback) {
        const corsHeaders = collectCorsHeaders(request.headers.origin,
            request.method, bucket);

        if (!isBucketAuthorized(bucket, requestType, canonicalID, authInfo, log, request)) {
        if (!isBucketAuthorized(bucket, requestType, canonicalID, authInfo,
            log, request, request.actionImplicitDenies)) {
            log.debug('access denied for user on bucket', {
                requestType,
                method: 'bucketGetCors',

@@ -4,7 +4,7 @@ const async = require('async');
const { pushMetric } = require('../utapi/utilities');
const collectCorsHeaders = require('../utilities/collectCorsHeaders');
const { checkExpectedBucketOwner } = require('./apiUtils/authorization/bucketOwner');
const { metadataValidateBucket } = require('../metadata/metadataUtils');
const { metadataValidateBucketNew } = require('../metadata/metadataUtils');
const escapeForXml = s3middleware.escapeForXml;

/**

@@ -27,7 +27,7 @@ function bucketGetEncryption(authInfo, request, log, callback) {
    };

    return async.waterfall([
        next => metadataValidateBucket(metadataValParams, log, next),
        next => metadataValidateBucketNew(metadataValParams, request.actionImplicitDenies, log, next),
        (bucket, next) => checkExpectedBucketOwner(request.headers, bucket, log, err => next(err, bucket)),
        (bucket, next) => {
            // If sseInfo is present but the `mandatory` flag is not set

@@ -2,7 +2,7 @@ const { errors } = require('arsenal');
const LifecycleConfiguration =
    require('arsenal').models.LifecycleConfiguration;

const { metadataValidateBucket } = require('../metadata/metadataUtils');
const { metadataValidateBucketNew } = require('../metadata/metadataUtils');
const { pushMetric } = require('../utapi/utilities');
const collectCorsHeaders = require('../utilities/collectCorsHeaders');

@@ -23,7 +23,7 @@ function bucketGetLifecycle(authInfo, request, log, callback) {
        requestType: 'bucketGetLifecycle',
        request,
    };
    return metadataValidateBucket(metadataValParams, log, (err, bucket) => {
    return metadataValidateBucketNew(metadataValParams, request.actionImplicitDenies, log, (err, bucket) => {
        const corsHeaders = collectCorsHeaders(headers.origin, method, bucket);
        if (err) {
            log.debug('error processing request', {

@@ -36,7 +36,8 @@ function bucketGetLocation(authInfo, request, log, callback) {
        const corsHeaders = collectCorsHeaders(request.headers.origin,
            request.method, bucket);

        if (!isBucketAuthorized(bucket, requestType, canonicalID, authInfo, log, request)) {
        if (!isBucketAuthorized(bucket, requestType, canonicalID, authInfo,
            log, request, request.actionImplicitDenies)) {
            log.debug('access denied for account on bucket', {
                requestType,
                method: 'bucketGetLocation',

@@ -1,4 +1,4 @@
const { metadataValidateBucket } = require('../metadata/metadataUtils');
const { metadataValidateBucketNew } = require('../metadata/metadataUtils');
const { pushMetric } = require('../utapi/utilities');
const collectCorsHeaders = require('../utilities/collectCorsHeaders');
const { NotificationConfiguration } = require('arsenal').models;

@@ -41,7 +41,7 @@ function bucketGetNotification(authInfo, request, log, callback) {
        request,
    };

    return metadataValidateBucket(metadataValParams, log, (err, bucket) => {
    return metadataValidateBucketNew(metadataValParams, request.actionImplicitDenies, log, (err, bucket) => {
        const corsHeaders = collectCorsHeaders(headers.origin, method, bucket);
        if (err) {
            log.debug('error processing request', {

@@ -1,5 +1,5 @@
const { errors } = require('arsenal');
const { metadataValidateBucket } = require('../metadata/metadataUtils');
const { metadataValidateBucketNew } = require('../metadata/metadataUtils');
const { pushMetric } = require('../utapi/utilities');
const collectCorsHeaders = require('../utilities/collectCorsHeaders');
const ObjectLockConfiguration =

@@ -36,7 +36,7 @@ function bucketGetObjectLock(authInfo, request, log, callback) {
        requestType: 'bucketGetObjectLock',
        request,
    };
    return metadataValidateBucket(metadataValParams, log, (err, bucket) => {
    return metadataValidateBucketNew(metadataValParams, request.actionImplicitDenies, log, (err, bucket) => {
        const corsHeaders = collectCorsHeaders(headers.origin, method, bucket);
        if (err) {
            log.debug('error processing request', {

@@ -1,6 +1,6 @@
const { errors } = require('arsenal');

const { metadataValidateBucket } = require('../metadata/metadataUtils');
const { metadataValidateBucketNew } = require('../metadata/metadataUtils');
const collectCorsHeaders = require('../utilities/collectCorsHeaders');

/**

@@ -21,7 +21,7 @@ function bucketGetPolicy(authInfo, request, log, callback) {
        request,
    };

    return metadataValidateBucket(metadataValParams, log, (err, bucket) => {
    return metadataValidateBucketNew(metadataValParams, request.actionImplicitDenies, log, (err, bucket) => {
        const corsHeaders = collectCorsHeaders(headers.origin, method, bucket);
        if (err) {
            log.debug('error processing request', {

@@ -1,6 +1,6 @@
const { errors } = require('arsenal');

const { metadataValidateBucket } = require('../metadata/metadataUtils');
const { metadataValidateBucketNew } = require('../metadata/metadataUtils');
const { pushMetric } = require('../utapi/utilities');
const { getReplicationConfigurationXML } =
    require('./apiUtils/bucket/getReplicationConfiguration');

@@ -23,7 +23,7 @@ function bucketGetReplication(authInfo, request, log, callback) {
        requestType: 'bucketGetReplication',
        request,
    };
    return metadataValidateBucket(metadataValParams, log, (err, bucket) => {
    return metadataValidateBucketNew(metadataValParams, request.actionImplicitDenies, log, (err, bucket) => {
        const corsHeaders = collectCorsHeaders(headers.origin, method, bucket);
        if (err) {
            log.debug('error processing request', {

@@ -1,4 +1,4 @@
const { metadataValidateBucket } = require('../metadata/metadataUtils');
const { metadataValidateBucketNew } = require('../metadata/metadataUtils');
const collectCorsHeaders = require('../utilities/collectCorsHeaders');
const { pushMetric } = require('../utapi/utilities');

@@ -57,7 +57,7 @@ function bucketGetVersioning(authInfo, request, log, callback) {
        request,
    };

    metadataValidateBucket(metadataValParams, log, (err, bucket) => {
    metadataValidateBucketNew(metadataValParams, request.actionImplicitDenies, log, (err, bucket) => {
        const corsHeaders = collectCorsHeaders(request.headers.origin,
            request.method, bucket);
        if (err) {

@@ -34,7 +34,8 @@ function bucketGetWebsite(authInfo, request, log, callback) {

        const corsHeaders = collectCorsHeaders(request.headers.origin,
            request.method, bucket);
        if (!isBucketAuthorized(bucket, requestType, canonicalID, authInfo, log, request)) {
        if (!isBucketAuthorized(bucket, requestType, canonicalID, authInfo,
            log, request, request.actionImplicitDenies)) {
            log.debug('access denied for user on bucket', {
                requestType,
                method: 'bucketGetWebsite',

@@ -6,7 +6,7 @@ const aclUtils = require('../utilities/aclUtils');
const { cleanUpBucket } = require('./apiUtils/bucket/bucketCreation');
const collectCorsHeaders = require('../utilities/collectCorsHeaders');
const constants = require('../../constants');
const { metadataValidateBucket } = require('../metadata/metadataUtils');
const { metadataValidateBucketNew } = require('../metadata/metadataUtils');
const vault = require('../auth/vault');
const { pushMetric } = require('../utapi/utilities');

@@ -43,7 +43,7 @@ const { pushMetric } = require('../utapi/utilities');
function bucketPutACL(authInfo, request, log, callback) {
    log.debug('processing request', { method: 'bucketPutACL' });

    const bucketName = request.bucketName;
    const { bucketName } = request;
    const canonicalID = authInfo.getCanonicalID();
    const newCannedACL = request.headers['x-amz-acl'];
    const possibleCannedACL = [

@@ -53,17 +53,6 @@ function bucketPutACL(authInfo, request, log, callback) {
        'authenticated-read',
        'log-delivery-write',
    ];
    if (newCannedACL && possibleCannedACL.indexOf(newCannedACL) === -1) {
        log.trace('invalid canned acl argument', {
            acl: newCannedACL,
            method: 'bucketPutACL',
        });
        return callback(errors.InvalidArgument);
    }
    if (!aclUtils.checkGrantHeaderValidity(request.headers)) {
        log.trace('invalid acl header');
        return callback(errors.InvalidArgument);
    }
    const possibleGroups = [constants.allAuthedUsersId,
        constants.publicId,
        constants.logId,

@@ -71,7 +60,7 @@ function bucketPutACL(authInfo, request, log, callback) {
    const metadataValParams = {
        authInfo,
        bucketName,
        requestType: 'bucketPutACL',
        requestType: request.apiMethods || 'bucketPutACL',
        request,
    };
    const possibleGrants = ['FULL_CONTROL', 'WRITE',

@@ -85,34 +74,41 @@ function bucketPutACL(authInfo, request, log, callback) {
        READ_ACP: [],
    };

    const grantReadHeader =
        aclUtils.parseGrant(request.headers[
            'x-amz-grant-read'], 'READ');
    const grantWriteHeader =
        aclUtils.parseGrant(request.headers['x-amz-grant-write'], 'WRITE');
    const grantReadACPHeader =
        aclUtils.parseGrant(request.headers['x-amz-grant-read-acp'],
            'READ_ACP');
    const grantWriteACPHeader =
        aclUtils.parseGrant(request.headers['x-amz-grant-write-acp'],
            'WRITE_ACP');
    const grantFullControlHeader =
        aclUtils.parseGrant(request.headers['x-amz-grant-full-control'],
            'FULL_CONTROL');
    const grantReadHeader = aclUtils.parseGrant(request.headers[
        'x-amz-grant-read'], 'READ');
    const grantWriteHeader = aclUtils.parseGrant(request.headers['x-amz-grant-write'], 'WRITE');
    const grantReadACPHeader = aclUtils.parseGrant(request.headers['x-amz-grant-read-acp'],
        'READ_ACP');
    const grantWriteACPHeader = aclUtils.parseGrant(request.headers['x-amz-grant-write-acp'],
        'WRITE_ACP');
    const grantFullControlHeader = aclUtils.parseGrant(request.headers['x-amz-grant-full-control'],
        'FULL_CONTROL');

    return async.waterfall([
        function waterfall1(next) {
            metadataValidateBucket(metadataValParams, log,
                (err, bucket) => {
                    if (err) {
                        log.trace('request authorization failed', {
                            error: err,
                            method: 'metadataValidateBucket',
                        });
                        return next(err, bucket);
                    }
                    return next(null, bucket);
                });
            metadataValidateBucketNew(metadataValParams, request.actionImplicitDenies, log,
                (err, bucket) => {
                    if (err) {
                        log.trace('request authorization failed', {
                            error: err,
                            method: 'metadataValidateBucket',
                        });
                        return next(err, bucket);
                    }
                    // if the API call is allowed, ensure that the parameters are valid
                    if (newCannedACL && possibleCannedACL.indexOf(newCannedACL) === -1) {
                        log.trace('invalid canned acl argument', {
                            acl: newCannedACL,
                            method: 'bucketPutACL',
                        });
                        return next(errors.InvalidArgument);
                    }
                    if (!aclUtils.checkGrantHeaderValidity(request.headers)) {
                        log.trace('invalid acl header');
                        return next(errors.InvalidArgument);
                    }
                    return next(null, bucket);
                });
        },
        function waterfall2(bucket, next) {
            // If not setting acl through headers, parse body

@@ -179,7 +175,7 @@ function bucketPutACL(authInfo, request, log, callback) {
                    if (!skip && granteeType === 'Group') {
                        if (possibleGroups.indexOf(grantee.URI[0]) < 0) {
                            log.trace('invalid user group',
                                { userGroup: grantee.URI[0] });
                                { userGroup: grantee.URI[0] });
                            return next(errors.InvalidArgument, bucket);
                        }
                        return usersIdentifiedByGroup.push({

@@ -193,22 +189,23 @@ function bucketPutACL(authInfo, request, log, callback) {
            } else {
                // If no canned ACL and no parsed xml, loop
                // through the access headers
                const allGrantHeaders =
                    [].concat(grantReadHeader, grantWriteHeader,
                const allGrantHeaders = [].concat(grantReadHeader, grantWriteHeader,
                    grantReadACPHeader, grantWriteACPHeader,
                    grantFullControlHeader);

                usersIdentifiedByEmail = allGrantHeaders.filter(item =>
                    item && item.userIDType.toLowerCase() === 'emailaddress');
                usersIdentifiedByEmail = allGrantHeaders.filter(item => item
                    && item.userIDType.toLowerCase() === 'emailaddress');

                usersIdentifiedByGroup = allGrantHeaders
                    .filter(itm => itm && itm.userIDType
                        .toLowerCase() === 'uri');
                        .toLowerCase() === 'uri');
                for (let i = 0; i < usersIdentifiedByGroup.length; i++) {
                    const userGroup = usersIdentifiedByGroup[i].identifier;
                    if (possibleGroups.indexOf(userGroup) < 0) {
                        log.trace('invalid user group', { userGroup,
                            method: 'bucketPutACL' });
                        log.trace('invalid user group', {
                            userGroup,
                            method: 'bucketPutACL',
                        });
                        return next(errors.InvalidArgument, bucket);
                    }
                }

@@ -241,8 +238,8 @@ function bucketPutACL(authInfo, request, log, callback) {
                return vault.getCanonicalIds(justEmails, log,
                    (err, results) => {
                        if (err) {
                            log.trace('error looking up canonical ids', {
                                error: err, method: 'vault.getCanonicalIDs' });
                            log.trace('error looking up canonical ids',
                                { error: err, method: 'vault.getCanonicalIDs' });
                            return next(err, bucket);
                        }
                        const reconstructedUsersIdentifiedByEmail = aclUtils

@@ -251,7 +248,8 @@ function bucketPutACL(authInfo, request, log, callback) {
                        const allUsers = [].concat(
                            reconstructedUsersIdentifiedByEmail,
                            usersIdentifiedByID,
                            usersIdentifiedByGroup);
                            usersIdentifiedByGroup,
                        );
                        const revisedAddACLParams = aclUtils
                            .sortHeaderGrants(allUsers, addACLParams);
                        return next(null, bucket, revisedAddACLParams);

@@ -259,9 +257,9 @@ function bucketPutACL(authInfo, request, log, callback) {
            }
            const allUsers = [].concat(
                usersIdentifiedByID,
                usersIdentifiedByGroup);
            const revisedAddACLParams =
                aclUtils.sortHeaderGrants(allUsers, addACLParams);
                usersIdentifiedByGroup,
            );
            const revisedAddACLParams = aclUtils.sortHeaderGrants(allUsers, addACLParams);
            return next(null, bucket, revisedAddACLParams);
        },
        function waterfall4(bucket, addACLParams, next) {

@@ -272,12 +270,10 @@ function bucketPutACL(authInfo, request, log, callback) {
            if (bucket.hasTransientFlag() || bucket.hasDeletedFlag()) {
                log.trace('transient or deleted flag so cleaning up bucket');
                bucket.setFullAcl(addACLParams);
                return cleanUpBucket(bucket, canonicalID, log, err =>
                    next(err, bucket));
                return cleanUpBucket(bucket, canonicalID, log, err => next(err, bucket));
            }
            // If no bucket flags, just add acl's to bucket metadata
            return acl.addACL(bucket, addACLParams, log, err =>
                next(err, bucket));
            return acl.addACL(bucket, addACLParams, log, err => next(err, bucket));
        },
    ], (err, bucket) => {
        const corsHeaders = collectCorsHeaders(request.headers.origin,

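One behavioural consequence of the bucketPutACL rework above: canned-ACL and grant-header validation now runs inside the first waterfall step, after authorization succeeds, so an unauthorized caller sees AccessDenied rather than InvalidArgument. A generic sketch of that ordering, using async.waterfall as the handler does (the helper names are placeholders, not project functions):

```js
const async = require('async');

// Authorize first, validate caller input second, then apply the change.
// `authorize`, `validateInput` and `apply` are placeholder async helpers
// that each take a node-style callback.
function putWithDeferredValidation(authorize, validateInput, apply, done) {
    async.waterfall([
        next => authorize(err => next(err)),     // AccessDenied surfaces here first
        next => validateInput(err => next(err)), // InvalidArgument only if authorized
        next => apply(err => next(err)),
    ], done);
}
```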
@@ -4,8 +4,7 @@ const { errors } = require('arsenal');

const bucketShield = require('./apiUtils/bucket/bucketShield');
const collectCorsHeaders = require('../utilities/collectCorsHeaders');
const { isBucketAuthorized } =
    require('./apiUtils/authorization/permissionChecks');
const { isBucketAuthorized } = require('./apiUtils/authorization/permissionChecks');
const metadata = require('../metadata/wrapper');
const { parseCorsXml } = require('./apiUtils/bucket/bucketCors');
const { pushMetric } = require('../utapi/utilities');

@@ -22,7 +21,7 @@ const requestType = 'bucketPutCors';
 */
function bucketPutCors(authInfo, request, log, callback) {
    log.debug('processing request', { method: 'bucketPutCors' });
    const bucketName = request.bucketName;
    const { bucketName } = request;
    const canonicalID = authInfo.getCanonicalID();

    if (!request.post) {

@@ -66,7 +65,8 @@ function bucketPutCors(authInfo, request, log, callback) {
            });
        },
        function validateBucketAuthorization(bucket, rules, corsHeaders, next) {
            if (!isBucketAuthorized(bucket, requestType, canonicalID, authInfo, log, request)) {
            if (!isBucketAuthorized(bucket, request.apiMethods || requestType, canonicalID, authInfo,
                log, request, request.actionImplicitDenies)) {
                log.debug('access denied for account on bucket', {
                    requestType,
                });

@@ -77,8 +77,7 @@ function bucketPutCors(authInfo, request, log, callback) {
        function updateBucketMetadata(bucket, rules, corsHeaders, next) {
            log.trace('updating bucket cors rules in metadata');
            bucket.setCors(rules);
            metadata.updateBucket(bucketName, bucket, log, err =>
                next(err, corsHeaders));
            metadata.updateBucket(bucketName, bucket, log, err => next(err, corsHeaders));
        },
    ], (err, corsHeaders) => {
        if (err) {

@@ -3,7 +3,7 @@ const async = require('async');
const { parseEncryptionXml } = require('./apiUtils/bucket/bucketEncryption');
const { checkExpectedBucketOwner } = require('./apiUtils/authorization/bucketOwner');
const metadata = require('../metadata/wrapper');
const { metadataValidateBucket } = require('../metadata/metadataUtils');
const { metadataValidateBucketNew } = require('../metadata/metadataUtils');
const kms = require('../kms/wrapper');
const { pushMetric } = require('../utapi/utilities');
const collectCorsHeaders = require('../utilities/collectCorsHeaders');

@@ -18,17 +18,17 @@ const collectCorsHeaders = require('../utilities/collectCorsHeaders');
 */

function bucketPutEncryption(authInfo, request, log, callback) {
    const bucketName = request.bucketName;
    const { bucketName } = request;

    const metadataValParams = {
        authInfo,
        bucketName,
        requestType: 'bucketPutEncryption',
        requestType: request.apiMethods || 'bucketPutEncryption',
        request,
    };

    return async.waterfall([
        next => metadataValidateBucket(metadataValParams, log, next),
        next => metadataValidateBucketNew(metadataValParams, request.actionImplicitDenies, log, next),
        (bucket, next) => checkExpectedBucketOwner(request.headers, bucket, log, err => next(err, bucket)),
        (bucket, next) => {
            log.trace('parsing encryption config', { method: 'bucketPutEncryption' });

@@ -1,12 +1,11 @@
const { waterfall } = require('async');
const uuid = require('uuid/v4');
const LifecycleConfiguration =
    require('arsenal').models.LifecycleConfiguration;
const { LifecycleConfiguration } = require('arsenal').models;

const parseXML = require('../utilities/parseXML');
const collectCorsHeaders = require('../utilities/collectCorsHeaders');
const metadata = require('../metadata/wrapper');
const { metadataValidateBucket } = require('../metadata/metadataUtils');
const { metadataValidateBucketNew } = require('../metadata/metadataUtils');
const { pushMetric } = require('../utapi/utilities');

/**

@@ -21,11 +20,11 @@ const { pushMetric } = require('../utapi/utilities');
function bucketPutLifecycle(authInfo, request, log, callback) {
    log.debug('processing request', { method: 'bucketPutLifecycle' });

    const bucketName = request.bucketName;
    const { bucketName } = request;
    const metadataValParams = {
        authInfo,
        bucketName,
        requestType: 'bucketPutLifecycle',
        requestType: request.apiMethods || 'bucketPutLifecycle',
        request,
    };
    return waterfall([

@@ -42,7 +41,7 @@ function bucketPutLifecycle(authInfo, request, log, callback) {
                return next(null, configObj);
            });
        },
        (lcConfig, next) => metadataValidateBucket(metadataValParams, log,
        (lcConfig, next) => metadataValidateBucketNew(metadataValParams, request.actionImplicitDenies, log,
            (err, bucket) => {
                if (err) {
                    return next(err, bucket);

@@ -54,8 +53,7 @@ function bucketPutLifecycle(authInfo, request, log, callback) {
                bucket.setUid(uuid());
            }
            bucket.setLifecycleConfiguration(lcConfig);
            metadata.updateBucket(bucket.getName(), bucket, log, err =>
                next(err, bucket));
            metadata.updateBucket(bucket.getName(), bucket, log, err => next(err, bucket));
        },
    ], (err, bucket) => {
        const corsHeaders = collectCorsHeaders(request.headers.origin,

@@ -4,7 +4,7 @@ const parseXML = require('../utilities/parseXML');
const collectCorsHeaders = require('../utilities/collectCorsHeaders');
const getNotificationConfiguration = require('./apiUtils/bucket/getNotificationConfiguration');
const metadata = require('../metadata/wrapper');
const { metadataValidateBucket } = require('../metadata/metadataUtils');
const { metadataValidateBucketNew } = require('../metadata/metadataUtils');
const { pushMetric } = require('../utapi/utilities');

/**

@@ -19,11 +19,11 @@ const { pushMetric } = require('../utapi/utilities');
function bucketPutNotification(authInfo, request, log, callback) {
    log.debug('processing request', { method: 'bucketPutNotification' });

    const bucketName = request.bucketName;
    const { bucketName } = request;
    const metadataValParams = {
        authInfo,
        bucketName,
        requestType: 'bucketPutNotification',
        requestType: request.apiMethods || 'bucketPutNotification',
        request,
    };

@@ -34,7 +34,7 @@ function bucketPutNotification(authInfo, request, log, callback) {
            const notifConfig = notificationConfig.error ? undefined : notificationConfig;
            process.nextTick(() => next(notificationConfig.error, notifConfig));
        },
        (notifConfig, next) => metadataValidateBucket(metadataValParams, log,
        (notifConfig, next) => metadataValidateBucketNew(metadataValParams, request.actionImplicitDenies, log,
            (err, bucket) => next(err, bucket, notifConfig)),
        (bucket, notifConfig, next) => {
            bucket.setNotificationConfiguration(notifConfig);

@@ -45,8 +45,10 @@ function bucketPutNotification(authInfo, request, log, callback) {
        const corsHeaders = collectCorsHeaders(request.headers.origin,
            request.method, bucket);
        if (err) {
            log.trace('error processing request', { error: err,
                method: 'bucketPutNotification' });
            log.trace('error processing request', {
                error: err,
                method: 'bucketPutNotification',
            });
            return callback(err, corsHeaders);
        }
        pushMetric('putBucketNotification', log, {

@@ -1,13 +1,13 @@
const { waterfall } = require('async');
const arsenal = require('arsenal');

const errors = arsenal.errors;
const ObjectLockConfiguration = arsenal.models.ObjectLockConfiguration;
const { errors } = arsenal;
const { ObjectLockConfiguration } = arsenal.models;

const parseXML = require('../utilities/parseXML');
const collectCorsHeaders = require('../utilities/collectCorsHeaders');
const metadata = require('../metadata/wrapper');
const { metadataValidateBucket } = require('../metadata/metadataUtils');
const { metadataValidateBucketNew } = require('../metadata/metadataUtils');
const { pushMetric } = require('../utapi/utilities');

/**

@@ -22,11 +22,11 @@ const { pushMetric } = require('../utapi/utilities');
function bucketPutObjectLock(authInfo, request, log, callback) {
    log.debug('processing request', { method: 'bucketPutObjectLock' });

    const bucketName = request.bucketName;
    const { bucketName } = request;
    const metadataValParams = {
        authInfo,
        bucketName,
        requestType: 'bucketPutObjectLock',
        requestType: request.apiMethods || 'bucketPutObjectLock',
        request,
    };
    return waterfall([

@@ -36,12 +36,12 @@ function bucketPutObjectLock(authInfo, request, log, callback) {
            // if there was an error getting object lock configuration,
            // returned configObj will contain 'error' key
            process.nextTick(() => {
                const configObj = lockConfigClass.
                    getValidatedObjectLockConfiguration();
                const configObj = lockConfigClass
                    .getValidatedObjectLockConfiguration();
                return next(configObj.error || null, configObj);
            });
        },
        (objectLockConfig, next) => metadataValidateBucket(metadataValParams,
        (objectLockConfig, next) => metadataValidateBucketNew(metadataValParams, request.actionImplicitDenies,
            log, (err, bucket) => {
                if (err) {
                    return next(err, bucket);

@@ -53,23 +53,25 @@ function bucketPutObjectLock(authInfo, request, log, callback) {
            process.nextTick(() => {
                if (!isObjectLockEnabled) {
                    return next(errors.InvalidBucketState.customizeDescription(
                        'Object Lock configuration cannot be enabled on ' +
                        'existing buckets'), bucket);
                        'Object Lock configuration cannot be enabled on '
                        + 'existing buckets',
                    ), bucket);
                }
                return next(null, bucket, objectLockConfig);
            });
        },
        (bucket, objectLockConfig, next) => {
            bucket.setObjectLockConfiguration(objectLockConfig);
            metadata.updateBucket(bucket.getName(), bucket, log, err =>
                next(err, bucket));
            metadata.updateBucket(bucket.getName(), bucket, log, err => next(err, bucket));
        },
    ], (err, bucket) => {
        const corsHeaders = collectCorsHeaders(request.headers.origin,
            request.method, bucket);
        if (err) {
            log.trace('error processing request', { error: err,
                method: 'bucketPutObjectLock' });
            log.trace('error processing request', {
                error: err,
                method: 'bucketPutObjectLock',
            });
            return callback(err, corsHeaders);
        }
        pushMetric('putBucketObjectLock', log, {

@@ -3,7 +3,7 @@ const { errors, models } = require('arsenal');
const collectCorsHeaders = require('../utilities/collectCorsHeaders');
const metadata = require('../metadata/wrapper');
const { metadataValidateBucket } = require('../metadata/metadataUtils');
const { metadataValidateBucketNew } = require('../metadata/metadataUtils');
const { validatePolicyResource } =
require('./apiUtils/authorization/permissionChecks');
const { BucketPolicy } = models;

@@ -17,8 +17,7 @@ const { BucketPolicy } = models;
function _checkNotImplementedPolicy(policyString) {
// bucket names and key names cannot include "", so including those
// isolates not implemented keys
return policyString.includes('"Condition"')
|| policyString.includes('"Service"')
return policyString.includes('"Service"')
|| policyString.includes('"Federated"');
}

@@ -37,7 +36,7 @@ function bucketPutPolicy(authInfo, request, log, callback) {
const metadataValParams = {
authInfo,
bucketName,
requestType: 'bucketPutPolicy',
requestType: request.apiMethods || 'bucketPutPolicy',
request,
};

@@ -70,7 +69,7 @@ function bucketPutPolicy(authInfo, request, log, callback) {
return next(null, bucketPolicy);
});
},
(bucketPolicy, next) => metadataValidateBucket(metadataValParams, log,
(bucketPolicy, next) => metadataValidateBucketNew(metadataValParams, request.actionImplicitDenies, log,
(err, bucket) => {
if (err) {
return next(err, bucket);
@@ -2,7 +2,7 @@ const { waterfall } = require('async');
const { errors } = require('arsenal');
const metadata = require('../metadata/wrapper');
const { metadataValidateBucket } = require('../metadata/metadataUtils');
const { metadataValidateBucketNew } = require('../metadata/metadataUtils');
const { pushMetric } = require('../utapi/utilities');
const { getReplicationConfiguration } =
require('./apiUtils/bucket/getReplicationConfiguration');

@@ -27,7 +27,7 @@ function bucketPutReplication(authInfo, request, log, callback) {
const metadataValParams = {
authInfo,
bucketName,
requestType: 'bucketPutReplication',
requestType: request.apiMethods || 'bucketPutReplication',
request,
};
return waterfall([

@@ -36,7 +36,7 @@ function bucketPutReplication(authInfo, request, log, callback) {
// Check bucket user privileges and ensure versioning is 'Enabled'.
(config, next) =>
// TODO: Validate that destination bucket exists and has versioning.
metadataValidateBucket(metadataValParams, log, (err, bucket) => {
metadataValidateBucketNew(metadataValParams, request.actionImplicitDenies, log, (err, bucket) => {
if (err) {
return next(err);
}
@@ -4,7 +4,7 @@ const { errors } = require('arsenal');
const collectCorsHeaders = require('../utilities/collectCorsHeaders');
const metadata = require('../metadata/wrapper');
const { metadataValidateBucket } = require('../metadata/metadataUtils');
const { metadataValidateBucketNew } = require('../metadata/metadataUtils');
const { pushMetric } = require('../utapi/utilities');
const versioningNotImplBackends =
require('../../constants').versioningNotImplBackends;

@@ -87,13 +87,13 @@ function bucketPutVersioning(authInfo, request, log, callback) {
const metadataValParams = {
authInfo,
bucketName,
requestType: 'bucketPutVersioning',
requestType: request.apiMethods || 'bucketPutVersioning',
request,
};
return waterfall([
next => _parseXML(request, log, next),
next => metadataValidateBucket(metadataValParams, log,
next => metadataValidateBucketNew(metadataValParams, request.actionImplicitDenies, log,
(err, bucket) => next(err, bucket)), // ignore extra null object,
(bucket, next) => parseString(request.post, (err, result) => {
// just for linting; there should not be any parsing error here
@@ -46,7 +46,8 @@ function bucketPutWebsite(authInfo, request, log, callback) {
});
},
function validateBucketAuthorization(bucket, config, next) {
if (!isBucketAuthorized(bucket, requestType, canonicalID, authInfo, log, request)) {
if (!isBucketAuthorized(bucket, request.apiMethods || requestType, canonicalID, authInfo,
log, request, request.actionImplicitDenies)) {
log.debug('access denied for user on bucket', {
requestType,
method: 'bucketPutWebsite',
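The diff widens the `isBucketAuthorized` call so the per-action API methods and the map of implicit denies reach the permission check. A hedged sketch of how a caller threads those arguments through, based only on the argument list visible here; the body of `isBucketAuthorized` itself is not part of this diff and `checkWebsitePut` is a hypothetical helper name.

    // Sketch of the extended call shape: (bucket, requestTypeOrApiMethods,
    // canonicalID, authInfo, log, request, actionImplicitDenies).
    function checkWebsitePut(bucket, requestType, canonicalID, authInfo, log, request) {
        const authorized = isBucketAuthorized(
            bucket,
            request.apiMethods || requestType, // array of actions when available
            canonicalID,
            authInfo,
            log,
            request,
            request.actionImplicitDenies, // map of action -> implicit-deny flag
        );
        if (!authorized) {
            log.debug('access denied for user on bucket', { requestType });
        }
        return authorized;
    }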
@@ -11,7 +11,7 @@ const collectCorsHeaders = require('../utilities/collectCorsHeaders');
const metadata = require('../metadata/wrapper');
const services = require('../services');
const vault = require('../auth/vault');
const { isBucketAuthorized } =
const { isBucketAuthorized, evaluateBucketPolicyWithIAM } =
require('./apiUtils/authorization/permissionChecks');
const { preprocessingVersioningDelete }
= require('./apiUtils/object/versioning');

@@ -385,15 +385,57 @@ function multiObjectDelete(authInfo, request, log, callback) {
return next(null, quietSetting, objects);
});
},
function checkPolicies(quietSetting, objects, next) {
function checkBucketMetadata(quietSetting, inPlay, next) {
const errorResults = [];
// if no objects in play, no need to check ACLs / get metadata,
// just move on if there is no Origin header
if (inPlay.length === 0 && !request.headers.origin) {
return next(null, quietSetting, errorResults, inPlay,
undefined);
}
return metadata.getBucket(bucketName, log, (err, bucketMD) => {
if (err) {
log.trace('error retrieving bucket metadata',
{ error: err });
return next(err);
}
// check whether bucket has transient or deleted flag
if (bucketShield(bucketMD, 'objectDelete')) {
return next(errors.NoSuchBucket);
}
// if no objects in play, no need to check ACLs
if (inPlay.length === 0) {
return next(null, quietSetting, errorResults, inPlay,
bucketMD);
}
if (!isBucketAuthorized(bucketMD, 'objectDelete', canonicalID, authInfo, log, request,
request.actionImplicitDenies)) {
log.trace("access denied due to bucket acl's");
// if access denied at the bucket level, no access for
// any of the objects so all results will be error results
inPlay.forEach(entry => {
errorResults.push({
entry,
error: errors.AccessDenied,
});
});
// by sending an empty array as the inPlay array
// async.forEachLimit below will not actually
// make any calls to metadata or data but will continue on
// to the next step to build xml
return next(null, quietSetting, errorResults, [], bucketMD);
}
return next(null, quietSetting, errorResults, inPlay, bucketMD);
});
},
function checkPolicies(quietSetting, errorResults, objects, bucketMD, next) {
// track keys that are still on track to be deleted
const inPlay = [];
const errorResults = [];
// if request from account, no need to check policies
// all objects are inPlay so send array of object keys
// as inPlay argument
if (!authInfo.isRequesterAnIAMUser()) {
return next(null, quietSetting, errorResults, objects);
return next(null, quietSetting, errorResults, objects, bucketMD);
}

// TODO: once arsenal's extractParams is separated from doAuth

@@ -437,7 +479,7 @@ function multiObjectDelete(authInfo, request, log, callback) {
error: errors.AccessDenied });
});
// send empty array for inPlay
return next(null, quietSetting, errorResults, []);
return next(null, quietSetting, errorResults, [], bucketMD);
}
if (err) {
log.trace('error checking policies', {

@@ -455,6 +497,17 @@ function multiObjectDelete(authInfo, request, log, callback) {
});
return next(errors.InternalError);
}
// Convert authorization results into an easier to handle format
const actionImplicitDenies = authorizationResults.reduce((acc, curr, idx) => {
if (requestContextParams[idx]) {
const apiMethod = requestContextParams[idx].apiMethod;
// eslint-disable-next-line no-param-reassign
acc[apiMethod] = curr.isImplicit;
} else {
log.trace(`Missing or incorrect requestContext structure for index ${idx}`);
}
return acc;
}, {});
for (let i = 0; i < authorizationResults.length; i++) {
const result = authorizationResults[i];
// result is { isAllowed: true,

@@ -470,7 +523,25 @@ function multiObjectDelete(authInfo, request, log, callback) {
key: result.arn.slice(slashIndex + 1),
versionId: result.versionId,
};
if (result.isAllowed) {
// Deny immediately if there is an explicit deny
if (!result.isImplicit && !result.isAllowed) {
errorResults.push({
entry,
error: errors.AccessDenied,
});
continue;
}
// Evaluate against the bucket policies
const areAllActionsAllowed = evaluateBucketPolicyWithIAM(
bucketMD,
Object.keys(actionImplicitDenies),
canonicalID,
authInfo,
actionImplicitDenies,
log,
request);
if (areAllActionsAllowed) {
inPlay.push(entry);
} else {
errorResults.push({

@@ -479,50 +550,9 @@ function multiObjectDelete(authInfo, request, log, callback) {
});
}
}
return next(null, quietSetting, errorResults, inPlay);
return next(null, quietSetting, errorResults, inPlay, bucketMD);
});
},
function checkBucketMetadata(quietSetting, errorResults, inPlay, next) {
// if no objects in play, no need to check ACLs / get metadata,
// just move on if there is no Origin header
if (inPlay.length === 0 && !request.headers.origin) {
return next(null, quietSetting, errorResults, inPlay,
undefined);
}
return metadata.getBucket(bucketName, log, (err, bucketMD) => {
if (err) {
log.trace('error retrieving bucket metadata',
{ error: err });
return next(err);
}
// check whether bucket has transient or deleted flag
if (bucketShield(bucketMD, 'objectDelete')) {
return next(errors.NoSuchBucket);
}
// if no objects in play, no need to check ACLs
if (inPlay.length === 0) {
return next(null, quietSetting, errorResults, inPlay,
bucketMD);
}
if (!isBucketAuthorized(bucketMD, 'objectDelete', canonicalID, authInfo, log, request)) {
log.trace("access denied due to bucket acl's");
// if access denied at the bucket level, no access for
// any of the objects so all results will be error results
inPlay.forEach(entry => {
errorResults.push({
entry,
error: errors.AccessDenied,
});
});
// by sending an empty array as the inPlay array
// async.forEachLimit below will not actually
// make any calls to metadata or data but will continue on
// to the next step to build xml
return next(null, quietSetting, errorResults, [], bucketMD);
}
return next(null, quietSetting, errorResults, inPlay, bucketMD);
});
},
function getObjMetadataAndDeleteStep(quietSetting, errorResults, inPlay,
bucket, next) {
return getObjMetadataAndDelete(authInfo, canonicalID, request,
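The reworked `checkPolicies` step folds Vault's per-object results into an `actionImplicitDenies` map, rejects explicit denies immediately, and lets implicit denies be re-evaluated against the bucket policy via `evaluateBucketPolicyWithIAM`. A stand-alone, hedged sketch of that filtering logic follows; it assumes each result carries `{ isAllowed, isImplicit }` and maps one-to-one to a request context, and `evaluatePolicy` merely stands in for the real `evaluateBucketPolicyWithIAM` call shown above.

    // Sketch only: splits delete entries into in-play and error buckets.
    function splitDeleteEntries(authorizationResults, requestContextParams, entries, evaluatePolicy) {
        const actionImplicitDenies = authorizationResults.reduce((acc, curr, idx) => {
            const ctx = requestContextParams[idx];
            if (ctx) {
                acc[ctx.apiMethod] = curr.isImplicit;
            }
            return acc;
        }, {});
        const inPlay = [];
        const errorResults = [];
        authorizationResults.forEach((result, idx) => {
            const entry = entries[idx];
            if (!result.isImplicit && !result.isAllowed) {
                // Explicit deny from IAM: reject the entry outright.
                errorResults.push({ entry, error: 'AccessDenied' });
                return;
            }
            // Implicit deny (or allow): the bucket policy has the final say.
            if (evaluatePolicy(Object.keys(actionImplicitDenies), actionImplicitDenies)) {
                inPlay.push(entry);
            } else {
                errorResults.push({ entry, error: 'AccessDenied' });
            }
        });
        return { inPlay, errorResults };
    }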
@@ -8,7 +8,7 @@ const { pushMetric } = require('../utapi/utilities');
const createAndStoreObject = require('./apiUtils/object/createAndStoreObject');
const { decodeVersionId, preprocessingVersioningDelete }
= require('./apiUtils/object/versioning');
const { metadataValidateBucketAndObj } = require('../metadata/metadataUtils');
const { metadataValidateBucketAndObjNew } = require('../metadata/metadataUtils');
const { hasGovernanceBypassHeader, checkUserGovernanceBypass, ObjectLockInfo }
= require('./apiUtils/object/objectLockHelpers');
const { config } = require('../Config');

@@ -49,15 +49,15 @@ function objectDelete(authInfo, request, log, cb) {
bucketName,
objectKey,
versionId: reqVersionId,
requestType: 'objectDelete',
requestType: request.apiMethods || 'objectDelete',
request,
};
const canonicalID = authInfo.getCanonicalID();
return async.waterfall([
function validateBucketAndObj(next) {
return metadataValidateBucketAndObj(valParams, log,
(err, bucketMD, objMD) => {
return metadataValidateBucketAndObjNew(valParams, request.actionImplicitDenies, log,
(err, bucketMD, objMD) => {
if (err) {
return next(err, bucketMD);
}
@@ -4,7 +4,7 @@ const { errors } = require('arsenal');
const { decodeVersionId, getVersionIdResHeader }
= require('./apiUtils/object/versioning');
const { metadataValidateBucketAndObj } = require('../metadata/metadataUtils');
const { metadataValidateBucketAndObjNew } = require('../metadata/metadataUtils');
const { pushMetric } = require('../utapi/utilities');
const collectCorsHeaders = require('../utilities/collectCorsHeaders');
const metadata = require('../metadata/wrapper');

@@ -40,13 +40,13 @@ function objectDeleteTagging(authInfo, request, log, callback) {
authInfo,
bucketName,
objectKey,
requestType: 'objectDeleteTagging',
requestType: request.apiMethods || 'objectDeleteTagging',
versionId: reqVersionId,
request,
};
return async.waterfall([
next => metadataValidateBucketAndObj(metadataValParams, log,
next => metadataValidateBucketAndObjNew(metadataValParams, request.actionImplicitDenies, log,
(err, bucket, objectMD) => {
if (err) {
log.trace('request authorization failed',
@@ -9,7 +9,7 @@ const collectResponseHeaders = require('../utilities/collectResponseHeaders');
const { pushMetric } = require('../utapi/utilities');
const { getVersionIdResHeader } = require('./apiUtils/object/versioning');
const setPartRanges = require('./apiUtils/object/setPartRanges');
const { metadataValidateBucketAndObj } = require('../metadata/metadataUtils');
const { metadataValidateBucketAndObjNew } = require('../metadata/metadataUtils');
const { getPartCountFromMd5 } = require('./apiUtils/object/partInfo');
const { setExpirationHeaders } = require('./apiUtils/object/expirationHeaders');

@@ -48,7 +48,7 @@ function objectGet(authInfo, request, returnTagCount, log, callback) {
request,
};
return metadataValidateBucketAndObj(mdValParams, log,
return metadataValidateBucketAndObjNew(mdValParams, request.actionImplicitDenies, log,
(err, bucket, objMD) => {
const corsHeaders = collectCorsHeaders(request.headers.origin,
request.method, bucket);
@@ -7,7 +7,7 @@ const { pushMetric } = require('../utapi/utilities');
const { decodeVersionId, getVersionIdResHeader }
= require('./apiUtils/object/versioning');
const vault = require('../auth/vault');
const { metadataValidateBucketAndObj } = require('../metadata/metadataUtils');
const { metadataValidateBucketAndObjNew } = require('../metadata/metadataUtils');

// Sample XML response:
/*

@@ -71,7 +71,7 @@ function objectGetACL(authInfo, request, log, callback) {
return async.waterfall([
function validateBucketAndObj(next) {
return metadataValidateBucketAndObj(metadataValParams, log,
return metadataValidateBucketAndObjNew(metadataValParams, request.actionImplicitDenies, log,
(err, bucket, objectMD) => {
if (err) {
log.trace('request authorization failed',
@@ -4,7 +4,7 @@ const { errors, s3middleware } = require('arsenal');
const { decodeVersionId, getVersionIdResHeader }
= require('./apiUtils/object/versioning');
const { metadataValidateBucketAndObj } = require('../metadata/metadataUtils');
const { metadataValidateBucketAndObjNew } = require('../metadata/metadataUtils');
const { pushMetric } = require('../utapi/utilities');
const collectCorsHeaders = require('../utilities/collectCorsHeaders');

@@ -43,7 +43,7 @@ function objectGetLegalHold(authInfo, request, log, callback) {
};
return async.waterfall([
next => metadataValidateBucketAndObj(metadataValParams, log,
next => metadataValidateBucketAndObjNew(metadataValParams, request.actionImplicitDenies, log,
(err, bucket, objectMD) => {
if (err) {
log.trace('request authorization failed',
@@ -4,7 +4,7 @@ const { errors, s3middleware } = require('arsenal');
const { decodeVersionId, getVersionIdResHeader }
= require('./apiUtils/object/versioning');
const { metadataValidateBucketAndObj } = require('../metadata/metadataUtils');
const { metadataValidateBucketAndObjNew } = require('../metadata/metadataUtils');
const { pushMetric } = require('../utapi/utilities');
const collectCorsHeaders = require('../utilities/collectCorsHeaders');

@@ -43,7 +43,7 @@ function objectGetRetention(authInfo, request, log, callback) {
};
return async.waterfall([
next => metadataValidateBucketAndObj(metadataValParams, log,
next => metadataValidateBucketAndObjNew(metadataValParams, request.actionImplicitDenies, log,
(err, bucket, objectMD) => {
if (err) {
log.trace('request authorization failed',
@@ -4,7 +4,7 @@ const { errors, s3middleware } = require('arsenal');
const { decodeVersionId, getVersionIdResHeader }
= require('./apiUtils/object/versioning');
const { metadataValidateBucketAndObj } = require('../metadata/metadataUtils');
const { metadataValidateBucketAndObjNew } = require('../metadata/metadataUtils');
const { pushMetric } = require('../utapi/utilities');
const collectCorsHeaders = require('../utilities/collectCorsHeaders');
const { convertToXml } = s3middleware.tagging;

@@ -43,7 +43,7 @@ function objectGetTagging(authInfo, request, log, callback) {
};
return async.waterfall([
next => metadataValidateBucketAndObj(metadataValParams, log,
next => metadataValidateBucketAndObjNew(metadataValParams, request.actionImplicitDenies, log,
(err, bucket, objectMD) => {
if (err) {
log.trace('request authorization failed',
@@ -7,7 +7,7 @@ const { getObjectSSEConfiguration } = require('./apiUtils/bucket/bucketEncryptio
const collectCorsHeaders = require('../utilities/collectCorsHeaders');
const createAndStoreObject = require('./apiUtils/object/createAndStoreObject');
const { checkQueryVersionId } = require('./apiUtils/object/versioning');
const { metadataValidateBucketAndObj } = require('../metadata/metadataUtils');
const { metadataValidateBucketAndObjNew } = require('../metadata/metadataUtils');
const { pushMetric } = require('../utapi/utilities');
const { validateHeaders } = require('./apiUtils/object/objectLockHelpers');
const { hasNonPrintables } = require('../utilities/stringChecks');

@@ -57,7 +57,7 @@ function objectPut(authInfo, request, streamingV4Params, log, callback) {
}
const invalidSSEError = errors.InvalidArgument.customizeDescription(
'The encryption method specified is not supported');
const requestType = 'objectPut';
const requestType = request.apiMethods || 'objectPut';
const valParams = { authInfo, bucketName, objectKey, requestType, request };
const canonicalID = authInfo.getCanonicalID();

@@ -68,8 +68,7 @@ function objectPut(authInfo, request, streamingV4Params, log, callback) {
}
log.trace('owner canonicalID to send to data', { canonicalID });
return metadataValidateBucketAndObj(valParams, log,
return metadataValidateBucketAndObjNew(valParams, request.actionImplicitDenies, log,
(err, bucket, objMD) => {
const responseHeaders = collectCorsHeaders(headers.origin,
method, bucket);
@@ -7,9 +7,8 @@ const { pushMetric } = require('../utapi/utilities');
const collectCorsHeaders = require('../utilities/collectCorsHeaders');
const constants = require('../../constants');
const vault = require('../auth/vault');
const { decodeVersionId, getVersionIdResHeader }
= require('./apiUtils/object/versioning');
const { metadataValidateBucketAndObj } = require('../metadata/metadataUtils');
const { decodeVersionId, getVersionIdResHeader } = require('./apiUtils/object/versioning');
const { metadataValidateBucketAndObjNew } = require('../metadata/metadataUtils');

/*
Format of xml request:

@@ -43,8 +42,8 @@ const { metadataValidateBucketAndObj } = require('../metadata/metadataUtils');
*/
function objectPutACL(authInfo, request, log, cb) {
log.debug('processing request', { method: 'objectPutACL' });
const bucketName = request.bucketName;
const objectKey = request.objectKey;
const { bucketName } = request;
const { objectKey } = request;
const newCannedACL = request.headers['x-amz-acl'];
const possibleCannedACL = [
'private',

@@ -82,8 +81,8 @@ function objectPutACL(authInfo, request, log, cb) {
authInfo,
bucketName,
objectKey,
requestType: 'objectPutACL',
versionId: reqVersionId,
requestType: request.apiMethods || 'objectPutACL',
};

const possibleGrants = ['FULL_CONTROL', 'WRITE_ACP', 'READ', 'READ_ACP'];

@@ -95,26 +94,26 @@ function objectPutACL(authInfo, request, log, cb) {
READ_ACP: [],
};

const grantReadHeader =
aclUtils.parseGrant(request.headers['x-amz-grant-read'], 'READ');
const grantReadACPHeader =
aclUtils.parseGrant(request.headers['x-amz-grant-read-acp'],
'READ_ACP');
const grantReadHeader = aclUtils.parseGrant(request.headers['x-amz-grant-read'], 'READ');
const grantReadACPHeader = aclUtils.parseGrant(request.headers['x-amz-grant-read-acp'],
'READ_ACP');
const grantWriteACPHeader = aclUtils.parseGrant(
request.headers['x-amz-grant-write-acp'], 'WRITE_ACP');
request.headers['x-amz-grant-write-acp'], 'WRITE_ACP',
);
const grantFullControlHeader = aclUtils.parseGrant(
request.headers['x-amz-grant-full-control'], 'FULL_CONTROL');
request.headers['x-amz-grant-full-control'], 'FULL_CONTROL',
);

return async.waterfall([
function validateBucketAndObj(next) {
return metadataValidateBucketAndObj(metadataValParams, log,
return metadataValidateBucketAndObjNew(metadataValParams, request.actionImplicitDenies, log,
(err, bucket, objectMD) => {
if (err) {
return next(err);
}
if (!objectMD) {
const err = reqVersionId ? errors.NoSuchVersion :
errors.NoSuchKey;
const err = reqVersionId ? errors.NoSuchVersion
: errors.NoSuchKey;
return next(err, bucket);
}
if (objectMD.isDeleteMarker) {

@@ -202,7 +201,7 @@ function objectPutACL(authInfo, request, log, cb) {
if (!skip && granteeType === 'Group') {
if (possibleGroups.indexOf(grantee.URI[0]) < 0) {
log.trace('invalid user group',
{ userGroup: grantee.URI[0] });
{ userGroup: grantee.URI[0] });
return next(errors.InvalidArgument, bucket);
}
return usersIdentifiedByGroup.push({

@@ -216,22 +215,24 @@ function objectPutACL(authInfo, request, log, cb) {
} else {
// If no canned ACL and no parsed xml, loop
// through the access headers
const allGrantHeaders =
[].concat(grantReadHeader,
const allGrantHeaders = [].concat(grantReadHeader,
grantReadACPHeader, grantWriteACPHeader,
grantFullControlHeader);

usersIdentifiedByEmail = allGrantHeaders.filter(item =>
item && item.userIDType.toLowerCase() === 'emailaddress');
usersIdentifiedByEmail = allGrantHeaders.filter(item => item
&& item.userIDType.toLowerCase() === 'emailaddress');
usersIdentifiedByGroup = allGrantHeaders
.filter(itm => itm && itm.userIDType
.toLowerCase() === 'uri');
for (let i = 0; i < usersIdentifiedByGroup.length; i++) {
.toLowerCase() === 'uri');
for (let i = 0; i < usersIdentifiedByGroup.length; i += 1) {
if (possibleGroups.indexOf(
usersIdentifiedByGroup[i].identifier) < 0) {
usersIdentifiedByGroup[i].identifier,
) < 0) {
log.trace('invalid user group',
{ userGroup: usersIdentifiedByGroup[i]
.identifier });
{
userGroup: usersIdentifiedByGroup[i]
.identifier,
});
return next(errors.InvalidArgument, bucket);
}
}

@@ -259,18 +260,20 @@ function objectPutACL(authInfo, request, log, cb) {
const allUsers = [].concat(
reconstructedUsersIdentifiedByEmail,
usersIdentifiedByID,
usersIdentifiedByGroup);
usersIdentifiedByGroup,
);
const revisedAddACLParams = aclUtils
.sortHeaderGrants(allUsers, addACLParams);
return next(null, bucket, objectMD,
revisedAddACLParams);
});
},
);
}
const allUsers = [].concat(
usersIdentifiedByID,
usersIdentifiedByGroup);
const revisedAddACLParams =
aclUtils.sortHeaderGrants(allUsers, addACLParams);
usersIdentifiedByGroup,
);
const revisedAddACLParams = aclUtils.sortHeaderGrants(allUsers, addACLParams);
return next(null, bucket, objectMD, revisedAddACLParams);
},
function addAclsToObjMD(bucket, objectMD, ACLParams, next) {

@@ -292,8 +295,7 @@ function objectPutACL(authInfo, request, log, cb) {
}
const verCfg = bucket.getVersioningConfiguration();
resHeaders['x-amz-version-id'] =
getVersionIdResHeader(verCfg, objectMD);
resHeaders['x-amz-version-id'] = getVersionIdResHeader(verCfg, objectMD);
log.trace('processed request successfully in object put acl api');
pushMetric('putObjectAcl', log, {
@@ -1,18 +1,18 @@
const async = require('async');
const { errors, versioning, s3middleware } = require('arsenal');
const validateHeaders = s3middleware.validateConditionalHeaders;
const collectCorsHeaders = require('../utilities/collectCorsHeaders');
const constants = require('../../constants');
const { data } = require('../data/wrapper');
const locationConstraintCheck =
require('./apiUtils/object/locationConstraintCheck');
const locationConstraintCheck = require('./apiUtils/object/locationConstraintCheck');
const metadata = require('../metadata/wrapper');
const { pushMetric } = require('../utapi/utilities');
const logger = require('../utilities/logger');
const services = require('../services');
const setUpCopyLocator = require('./apiUtils/object/setUpCopyLocator');
const { metadataValidateBucketAndObj } = require('../metadata/metadataUtils');
const { metadataValidateBucketAndObjNew } = require('../metadata/metadataUtils');

const versionIdUtils = versioning.VersionID;
const { config } = require('../Config');

@@ -58,8 +58,7 @@ function objectPutCopyPart(authInfo, request, sourceBucket,
// Note that keys in the query object retain their case, so
// request.query.uploadId must be called with that exact
// capitalization
const uploadId = request.query.uploadId;
const { uploadId } = request.query;
const valPutParams = {
authInfo,
bucketName: destBucketName,

@@ -89,26 +88,26 @@ function objectPutCopyPart(authInfo, request, sourceBucket,
return async.waterfall([
function checkDestAuth(next) {
return metadataValidateBucketAndObj(valPutParams, log,
return metadataValidateBucketAndObjNew(valPutParams, request.actionImplicitDenies, log,
(err, destBucketMD) => {
if (err) {
log.debug('error validating authorization for ' +
'destination bucket',
{ error: err });
log.debug('error validating authorization for '
+ 'destination bucket',
{ error: err });
return next(err, destBucketMD);
}
const flag = destBucketMD.hasDeletedFlag()
|| destBucketMD.hasTransientFlag();
if (flag) {
log.trace('deleted flag or transient flag ' +
'on destination bucket', { flag });
log.trace('deleted flag or transient flag '
+ 'on destination bucket', { flag });
return next(errors.NoSuchBucket);
}
return next(null, destBucketMD);
});
},
function checkSourceAuthorization(destBucketMD, next) {
return metadataValidateBucketAndObj(valGetParams, log,
return metadataValidateBucketAndObjNew(valGetParams, request.actionImplicitDenies, log,
(err, sourceBucketMD, sourceObjMD) => {
if (err) {
log.debug('error validating get part of request',

@@ -117,28 +116,26 @@ function objectPutCopyPart(authInfo, request, sourceBucket,
}
if (!sourceObjMD) {
log.debug('no source object', { sourceObject });
const err = reqVersionId ? errors.NoSuchVersion :
errors.NoSuchKey;
const err = reqVersionId ? errors.NoSuchVersion
: errors.NoSuchKey;
return next(err, destBucketMD);
}
let sourceLocationConstraintName =
sourceObjMD.dataStoreName;
let sourceLocationConstraintName = sourceObjMD.dataStoreName;
// for backwards compatibility before storing dataStoreName
// TODO: handle in objectMD class
if (!sourceLocationConstraintName &&
sourceObjMD.location[0] &&
sourceObjMD.location[0].dataStoreName) {
sourceLocationConstraintName =
sourceObjMD.location[0].dataStoreName;
if (!sourceLocationConstraintName
&& sourceObjMD.location[0]
&& sourceObjMD.location[0].dataStoreName) {
sourceLocationConstraintName = sourceObjMD.location[0].dataStoreName;
}
if (sourceObjMD.isDeleteMarker) {
log.debug('delete marker on source object',
{ sourceObject });
{ sourceObject });
if (reqVersionId) {
const err = errors.InvalidRequest
.customizeDescription('The source of a copy ' +
'request may not specifically refer to a delete' +
'marker by version id.');
.customizeDescription('The source of a copy '
+ 'request may not specifically refer to a delete'
+ 'marker by version id.');
return next(err, destBucketMD);
}
// if user specifies a key in a versioned source bucket

@@ -146,8 +143,7 @@ function objectPutCopyPart(authInfo, request, sourceBucket,
// delete marker, return NoSuchKey
return next(errors.NoSuchKey, destBucketMD);
}
const headerValResult =
validateHeaders(request.headers,
const headerValResult = validateHeaders(request.headers,
sourceObjMD['last-modified'],
sourceObjMD['content-md5']);
if (headerValResult.error) {

@@ -162,15 +158,15 @@ function objectPutCopyPart(authInfo, request, sourceBucket,
// If specific version requested, include copy source
// version id in response. Include in request by default
// if versioning is enabled or suspended.
if (sourceBucketMD.getVersioningConfiguration() ||
reqVersionId) {
if (sourceBucketMD.getVersioningConfiguration()
|| reqVersionId) {
if (sourceObjMD.isNull || !sourceObjMD.versionId) {
sourceVerId = 'null';
} else {
sourceVerId =
versionIdUtils.encode(
sourceObjMD.versionId,
config.versionIdEncodingType);
sourceVerId = versionIdUtils.encode(
sourceObjMD.versionId,
config.versionIdEncodingType,
);
}
}
return next(null, copyLocator.dataLocator, destBucketMD,

@@ -195,7 +191,7 @@ function objectPutCopyPart(authInfo, request, sourceBucket,
});
return next(err);
}
let splitter = constants.splitter;
let { splitter } = constants;
if (mpuBucket.getMdBucketModelVersion() < 2) {
splitter = constants.oldSplitter;
}

@@ -209,35 +205,33 @@ function objectPutCopyPart(authInfo, request, sourceBucket,
function getMpuOverviewObject(dataLocator, destBucketMD,
copyObjectSize, sourceVerId, splitter,
sourceLocationConstraintName, next) {
const mpuOverviewKey =
`overview${splitter}${destObjectKey}${splitter}${uploadId}`;
const mpuOverviewKey = `overview${splitter}${destObjectKey}${splitter}${uploadId}`;
return metadata.getObjectMD(mpuBucketName, mpuOverviewKey,
null, log, (err, res) => {
if (err) {
// TODO: move to `.is` once BKTCLT-9 is done and bumped in Cloudserver
if (err.NoSuchKey) {
return next(errors.NoSuchUpload);
}
log.error('error getting overview object from ' +
'mpu bucket', {
error: err,
method: 'objectPutCopyPart::' +
'metadata.getObjectMD',
});
return next(err);
null, log, (err, res) => {
if (err) {
// TODO: move to `.is` once BKTCLT-9 is done and bumped in Cloudserver
if (err.NoSuchKey) {
return next(errors.NoSuchUpload);
}
const initiatorID = res.initiator.ID;
const requesterID = authInfo.isRequesterAnIAMUser() ?
authInfo.getArn() : authInfo.getCanonicalID();
if (initiatorID !== requesterID) {
return next(errors.AccessDenied);
}
const destObjLocationConstraint =
res.controllingLocationConstraint;
return next(null, dataLocator, destBucketMD,
destObjLocationConstraint, copyObjectSize,
sourceVerId, sourceLocationConstraintName, splitter);
});
log.error('error getting overview object from '
+ 'mpu bucket', {
error: err,
method: 'objectPutCopyPart::'
+ 'metadata.getObjectMD',
});
return next(err);
}
const initiatorID = res.initiator.ID;
const requesterID = authInfo.isRequesterAnIAMUser()
? authInfo.getArn() : authInfo.getCanonicalID();
if (initiatorID !== requesterID) {
return next(errors.AccessDenied);
}
const destObjLocationConstraint = res.controllingLocationConstraint;
return next(null, dataLocator, destBucketMD,
destObjLocationConstraint, copyObjectSize,
sourceVerId, sourceLocationConstraintName, splitter);
});
},
function goGetData(
dataLocator,

@@ -249,6 +243,9 @@ function objectPutCopyPart(authInfo, request, sourceBucket,
splitter,
next,
) {
const originalIdentityAuthzResults = request.actionImplicitDenies;
// eslint-disable-next-line no-param-reassign
delete request.actionImplicitDenies;
data.uploadPartCopy(
request,
log,

@@ -259,31 +256,33 @@ function objectPutCopyPart(authInfo, request, sourceBucket,
dataStoreContext,
locationConstraintCheck,
(error, eTag, lastModified, serverSideEncryption, locations) => {
// eslint-disable-next-line no-param-reassign
request.actionImplicitDenies = originalIdentityAuthzResults;
if (error) {
if (error.message === 'skip') {
return next(skipError, destBucketMD, eTag,
lastModified, sourceVerId,
serverSideEncryption);
lastModified, sourceVerId,
serverSideEncryption);
}
return next(error, destBucketMD);
}
return next(null, destBucketMD, locations, eTag,
copyObjectSize, sourceVerId, serverSideEncryption,
lastModified, splitter);
});
copyObjectSize, sourceVerId, serverSideEncryption,
lastModified, splitter);
},
);
},
function getExistingPartInfo(destBucketMD, locations, totalHash,
copyObjectSize, sourceVerId, serverSideEncryption, lastModified,
splitter, next) {
const partKey =
`${uploadId}${constants.splitter}${paddedPartNumber}`;
const partKey = `${uploadId}${constants.splitter}${paddedPartNumber}`;
metadata.getObjectMD(mpuBucketName, partKey, {}, log,
(err, result) => {
// If there is nothing being overwritten just move on
// TODO: move to `.is` once BKTCLT-9 is done and bumped in Cloudserver
if (err && !err.NoSuchKey) {
log.debug('error getting current part (if any)',
{ error: err });
{ error: err });
return next(err);
}
let oldLocations;

@@ -294,8 +293,8 @@ function objectPutCopyPart(authInfo, request, sourceBucket,
// Pull locations to clean up any potential orphans
// in data if object put is an overwrite of
// already existing object with same key and part number
oldLocations = Array.isArray(oldLocations) ?
oldLocations : [oldLocations];
oldLocations = Array.isArray(oldLocations)
? oldLocations : [oldLocations];
}
return next(null, destBucketMD, locations, totalHash,
prevObjectSize, copyObjectSize, sourceVerId,

@@ -317,7 +316,7 @@ function objectPutCopyPart(authInfo, request, sourceBucket,
locations, metaStoreParams, log, err => {
if (err) {
log.debug('error storing new metadata',
{ error: err, method: 'storeNewPartMetadata' });
{ error: err, method: 'storeNewPartMetadata' });
return next(err);
}
return next(null, locations, oldLocations, destBucketMD, totalHash,

@@ -370,7 +369,8 @@ function objectPutCopyPart(authInfo, request, sourceBucket,
// data locations) has been stored
if (oldLocationsToDelete) {
const delLog = logger.newRequestLoggerFromSerializedUids(
log.getSerializedUids());
log.getSerializedUids(),
);
return data.batchDelete(oldLocationsToDelete, request.method, null,
delLog, err => {
if (err) {

@@ -409,11 +409,9 @@ function objectPutCopyPart(authInfo, request, sourceBucket,
const additionalHeaders = corsHeaders || {};
if (serverSideEncryption) {
additionalHeaders['x-amz-server-side-encryption'] =
serverSideEncryption.algorithm;
additionalHeaders['x-amz-server-side-encryption'] = serverSideEncryption.algorithm;
if (serverSideEncryption.algorithm === 'aws:kms') {
additionalHeaders['x-amz-server-side-encryption-aws-kms-key-id']
= serverSideEncryption.masterKeyId;
additionalHeaders['x-amz-server-side-encryption-aws-kms-key-id'] = serverSideEncryption.masterKeyId;
}
}
additionalHeaders['x-amz-copy-source-version-id'] = sourceVerId;
@@ -6,7 +6,7 @@ const { decodeVersionId, getVersionIdResHeader } =
require('./apiUtils/object/versioning');
const getReplicationInfo = require('./apiUtils/object/getReplicationInfo');
const metadata = require('../metadata/wrapper');
const { metadataValidateBucketAndObj } = require('../metadata/metadataUtils');
const { metadataValidateBucketAndObjNew } = require('../metadata/metadataUtils');
const { pushMetric } = require('../utapi/utilities');

const { parseLegalHoldXml } = s3middleware.objectLegalHold;

@@ -40,13 +40,13 @@ function objectPutLegalHold(authInfo, request, log, callback) {
authInfo,
bucketName,
objectKey,
requestType: 'objectPutLegalHold',
versionId,
requestType: request.apiMethods || 'objectPutLegalHold',
request,
};
return async.waterfall([
next => metadataValidateBucketAndObj(metadataValParams, log,
next => metadataValidateBucketAndObjNew(metadataValParams, request.actionImplicitDenies, log,
(err, bucket, objectMD) => {
if (err) {
log.trace('request authorization failed',
@@ -87,6 +87,7 @@ function objectPutPart(authInfo, request, streamingV4Params, log,
const uploadId = request.query.uploadId;
const mpuBucketName = `${constants.mpuBucketPrefix}${bucketName}`;
const objectKey = request.objectKey;
const originalIdentityAuthzResults = request.actionImplicitDenies;
return async.waterfall([
// Get the destination bucket.

@@ -109,7 +110,8 @@ function objectPutPart(authInfo, request, streamingV4Params, log,
// For validating the request at the destinationBucket level the
// `requestType` is the general 'objectPut'.
const requestType = 'objectPut';
if (!isBucketAuthorized(destinationBucket, requestType, canonicalID, authInfo, log, request)) {
if (!isBucketAuthorized(destinationBucket, request.apiMethods || requestType, canonicalID, authInfo,
log, request, request.actionImplicitDenies)) {
log.debug('access denied for user on bucket', { requestType });
return next(errors.AccessDenied, destinationBucket);
}

@@ -139,24 +141,24 @@ function objectPutPart(authInfo, request, streamingV4Params, log,
// Get the MPU shadow bucket.
(destinationBucket, cipherBundle, next) =>
metadata.getBucket(mpuBucketName, log,
(err, mpuBucket) => {
if (err && err.is.NoSuchBucket) {
return next(errors.NoSuchUpload, destinationBucket);
}
if (err) {
log.error('error getting the shadow mpu bucket', {
error: err,
method: 'objectPutPart::metadata.getBucket',
});
return next(err, destinationBucket);
}
let splitter = constants.splitter;
// BACKWARD: Remove to remove the old splitter
if (mpuBucket.getMdBucketModelVersion() < 2) {
splitter = constants.oldSplitter;
}
return next(null, destinationBucket, cipherBundle, splitter);
}),
(err, mpuBucket) => {
if (err && err.is.NoSuchBucket) {
return next(errors.NoSuchUpload, destinationBucket);
}
if (err) {
log.error('error getting the shadow mpu bucket', {
error: err,
method: 'objectPutPart::metadata.getBucket',
});
return next(err, destinationBucket);
}
let splitter = constants.splitter;
// BACKWARD: Remove to remove the old splitter
if (mpuBucket.getMdBucketModelVersion() < 2) {
splitter = constants.oldSplitter;
}
return next(null, destinationBucket, cipherBundle, splitter);
}),
// Check authorization of the MPU shadow bucket.
(destinationBucket, cipherBundle, splitter, next) => {
const mpuOverviewKey = _getOverviewKey(splitter, objectKey,

@@ -187,7 +189,7 @@ function objectPutPart(authInfo, request, streamingV4Params, log,
// If data backend is backend that handles mpu (like real AWS),
// no need to store part info in metadata
(destinationBucket, objectLocationConstraint, cipherBundle,
splitter, next) => {
splitter, next) => {
const mpuInfo = {
destinationBucket,
size,

@@ -196,24 +198,26 @@ function objectPutPart(authInfo, request, streamingV4Params, log,
partNumber,
bucketName,
};
// eslint-disable-next-line no-param-reassign
delete request.actionImplicitDenies;
writeContinue(request, request._response);
return data.putPart(request, mpuInfo, streamingV4Params,
objectLocationConstraint, locationConstraintCheck, log,
(err, partInfo, updatedObjectLC) => {
if (err) {
return next(err, destinationBucket);
}
// if data backend handles mpu, skip to end of waterfall
if (partInfo && partInfo.dataStoreType === 'aws_s3') {
return next(skipError, destinationBucket,
partInfo.dataStoreETag);
}
// partInfo will be null if data backend is not external
// if the object location constraint undefined because
// mpu was initiated in legacy version, update it
return next(null, destinationBucket, updatedObjectLC,
cipherBundle, splitter, partInfo);
});
objectLocationConstraint, locationConstraintCheck, log,
(err, partInfo, updatedObjectLC) => {
if (err) {
return next(err, destinationBucket);
}
// if data backend handles mpu, skip to end of waterfall
if (partInfo && partInfo.dataStoreType === 'aws_s3') {
return next(skipError, destinationBucket,
partInfo.dataStoreETag);
}
// partInfo will be null if data backend is not external
// if the object location constraint undefined because
// mpu was initiated in legacy version, update it
return next(null, destinationBucket, updatedObjectLC,
cipherBundle, splitter, partInfo);
});
},
// Get any pre-existing part.
(destinationBucket, objectLocationConstraint, cipherBundle,

@@ -249,14 +253,14 @@ function objectPutPart(authInfo, request, streamingV4Params, log,
},
// Store in data backend.
(destinationBucket, objectLocationConstraint, cipherBundle,
partKey, prevObjectSize, oldLocations, partInfo, splitter, next) => {
partKey, prevObjectSize, oldLocations, partInfo, splitter, next) => {
// NOTE: set oldLocations to null so we do not batchDelete for now
if (partInfo && partInfo.dataStoreType === 'azure') {
// skip to storing metadata
return next(null, destinationBucket, partInfo,
partInfo.dataStoreETag,
cipherBundle, partKey, prevObjectSize, null,
objectLocationConstraint, splitter);
partInfo.dataStoreETag,
cipherBundle, partKey, prevObjectSize, null,
objectLocationConstraint, splitter);
}
const objectContext = {
bucketName,

@@ -282,7 +286,7 @@ function objectPutPart(authInfo, request, streamingV4Params, log,
// Store data locations in metadata and delete any overwritten
// data if completeMPU hasn't been initiated yet.
(destinationBucket, dataGetInfo, hexDigest, cipherBundle, partKey,
prevObjectSize, oldLocations, objectLocationConstraint, splitter, next) => {
prevObjectSize, oldLocations, objectLocationConstraint, splitter, next) => {
// Use an array to be consistent with objectPutCopyPart where there
// could be multiple locations.
const partLocations = [dataGetInfo];

@@ -317,7 +321,7 @@ function objectPutPart(authInfo, request, streamingV4Params, log,
});
},
(partLocations, oldLocations, objectLocationConstraint, destinationBucket,
hexDigest, prevObjectSize, splitter, next) => {
hexDigest, prevObjectSize, splitter, next) => {
if (!oldLocations) {
return next(null, oldLocations, objectLocationConstraint,
destinationBucket, hexDigest, prevObjectSize);

@@ -378,6 +382,8 @@ function objectPutPart(authInfo, request, streamingV4Params, log,
], (err, destinationBucket, hexDigest, prevObjectSize) => {
const corsHeaders = collectCorsHeaders(request.headers.origin,
request.method, destinationBucket);
// eslint-disable-next-line no-param-reassign
request.actionImplicitDenies = originalIdentityAuthzResults;
if (err) {
if (err === skipError) {
return cb(null, hexDigest, corsHeaders);
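In objectPutPart (and objectPutCopyPart above), `request.actionImplicitDenies` is saved, deleted before the request is handed to the data backend, and restored in the callback, so backends that treat the request as opaque never see the extra authorization state. A hedged, generic sketch of that save/clear/restore pattern; `withoutImplicitDenies` is a hypothetical helper, not something this diff adds.

    // Sketch only: wraps a data-backend call so actionImplicitDenies is
    // stripped for the duration of the call and restored afterwards.
    function withoutImplicitDenies(request, fn, callback) {
        const saved = request.actionImplicitDenies;
        // eslint-disable-next-line no-param-reassign
        delete request.actionImplicitDenies;
        fn((...args) => {
            // eslint-disable-next-line no-param-reassign
            request.actionImplicitDenies = saved; // restore before continuing
            callback(...args);
        });
    }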
@@ -5,7 +5,7 @@ const { decodeVersionId, getVersionIdResHeader } =
require('./apiUtils/object/versioning');
const { ObjectLockInfo, checkUserGovernanceBypass, hasGovernanceBypassHeader } =
require('./apiUtils/object/objectLockHelpers');
const { metadataValidateBucketAndObj } = require('../metadata/metadataUtils');
const { metadataValidateBucketAndObjNew } = require('../metadata/metadataUtils');
const { pushMetric } = require('../utapi/utilities');
const getReplicationInfo = require('./apiUtils/object/getReplicationInfo');
const collectCorsHeaders = require('../utilities/collectCorsHeaders');

@@ -41,45 +41,57 @@ function objectPutRetention(authInfo, request, log, callback) {
authInfo,
bucketName,
objectKey,
requestType: 'objectPutRetention',
versionId: reqVersionId,
requestType: request.apiMethods || 'objectPutRetention',
request,
};

return async.waterfall([
next => metadataValidateBucketAndObj(metadataValParams, log,
(err, bucket, objectMD) => {
if (err) {
log.trace('request authorization failed',
{ method: 'objectPutRetention', error: err });
return next(err);
}
if (!objectMD) {
const err = reqVersionId ? errors.NoSuchVersion :
errors.NoSuchKey;
log.trace('error no object metadata found',
{ method: 'objectPutRetention', error: err });
return next(err, bucket);
}
if (objectMD.isDeleteMarker) {
log.trace('version is a delete marker',
{ method: 'objectPutRetention' });
return next(errors.MethodNotAllowed, bucket);
}
if (!bucket.isObjectLockEnabled()) {
log.trace('object lock not enabled on bucket',
{ method: 'objectPutRetention' });
return next(errors.InvalidRequest.customizeDescription(
'Bucket is missing Object Lock Configuration'
), bucket);
}
return next(null, bucket, objectMD);
}),
(bucket, objectMD, next) => {
next => {
log.trace('parsing retention information');
parseRetentionXml(request.post, log,
(err, retentionInfo) => next(err, bucket, retentionInfo, objectMD));
(err, retentionInfo) => {
if (err) {
log.trace('error parsing retention information',
{ error: err });
return next(err);
}
const remainingDays = Math.ceil(
(new Date(retentionInfo.date) - Date.now()) / (1000 * 3600 * 24));
metadataValParams.request.objectLockRetentionDays = remainingDays;
return next(null, retentionInfo);
});
},
(retentionInfo, next) => metadataValidateBucketAndObjNew(metadataValParams, request.actionImplicitDenies, log,
(err, bucket, objectMD) => {
if (err) {
log.trace('request authorization failed',
{ method: 'objectPutRetention', error: err });
return next(err);
}
if (!objectMD) {
const err = reqVersionId ? errors.NoSuchVersion :
errors.NoSuchKey;
log.trace('error no object metadata found',
{ method: 'objectPutRetention', error: err });
return next(err, bucket);
}
if (objectMD.isDeleteMarker) {
log.trace('version is a delete marker',
{ method: 'objectPutRetention' });
// FIXME we should return a `x-amz-delete-marker: true` header,
// see S3C-7592
return next(errors.MethodNotAllowed, bucket);
}
if (!bucket.isObjectLockEnabled()) {
log.trace('object lock not enabled on bucket',
{ method: 'objectPutRetention' });
return next(errors.InvalidRequest.customizeDescription(
'Bucket is missing Object Lock Configuration'
), bucket);
}
return next(null, bucket, retentionInfo, objectMD);
}),
(bucket, retentionInfo, objectMD, next) => {
const hasGovernanceBypass = hasGovernanceBypassHeader(request.headers);
if (hasGovernanceBypass && authInfo.isRequesterAnIAMUser()) {
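The reordered waterfall above parses the retention XML first and derives the remaining retention period before validation runs. An isolated, hedged sketch of that day arithmetic; `retainUntilDate` corresponds to `retentionInfo.date` in the diff, and `Math.ceil` rounds a partially elapsed day up to a full day.

    // Sketch only: remaining-days computation from the new parsing step.
    function remainingRetentionDays(retainUntilDate, now = Date.now()) {
        return Math.ceil((new Date(retainUntilDate) - now) / (1000 * 3600 * 24));
    }
    // Example: a retain-until date 36 hours away counts as 2 remaining days.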
@@ -1,15 +1,15 @@
const async = require('async');
const { errors, s3middleware } = require('arsenal');
const { decodeVersionId, getVersionIdResHeader } =
require('./apiUtils/object/versioning');
const { decodeVersionId, getVersionIdResHeader } = require('./apiUtils/object/versioning');
const { metadataValidateBucketAndObj } = require('../metadata/metadataUtils');
const { metadataValidateBucketAndObjNew } = require('../metadata/metadataUtils');
const { pushMetric } = require('../utapi/utilities');
const getReplicationInfo = require('./apiUtils/object/getReplicationInfo');
const collectCorsHeaders = require('../utilities/collectCorsHeaders');
const metadata = require('../metadata/wrapper');
const { data } = require('../data/wrapper');

const { parseTagXml } = s3middleware.tagging;
const REPLICATION_ACTION = 'PUT_TAGGING';

@@ -24,8 +24,8 @@ const REPLICATION_ACTION = 'PUT_TAGGING';
function objectPutTagging(authInfo, request, log, callback) {
log.debug('processing request', { method: 'objectPutTagging' });
const bucketName = request.bucketName;
const objectKey = request.objectKey;
const { bucketName } = request;
const { objectKey } = request;
const decodedVidResult = decodeVersionId(request.query);
if (decodedVidResult instanceof Error) {

@@ -41,13 +41,13 @@ function objectPutTagging(authInfo, request, log, callback) {
authInfo,
bucketName,
objectKey,
requestType: 'objectPutTagging',
versionId: reqVersionId,
requestType: request.apiMethods || 'objectPutTagging',
request,
};
return async.waterfall([
next => metadataValidateBucketAndObj(metadataValParams, log,
next => metadataValidateBucketAndObjNew(metadataValParams, request.actionImplicitDenies, log,
(err, bucket, objectMD) => {
if (err) {
log.trace('request authorization failed',

@@ -70,8 +70,7 @@ function objectPutTagging(authInfo, request, log, callback) {
}),
(bucket, objectMD, next) => {
log.trace('parsing tag(s)');
parseTagXml(request.post, log, (err, tags) =>
next(err, bucket, tags, objectMD));
parseTagXml(request.post, log, (err, tags) => next(err, bucket, tags, objectMD));
},
(bucket, tags, objectMD, next) => {
// eslint-disable-next-line no-param-reassign

@@ -88,13 +87,11 @@ function objectPutTagging(authInfo, request, log, callback) {
// eslint-disable-next-line no-param-reassign
objectMD.originOp = 's3:ObjectTagging:Put';
metadata.putObjectMD(bucket.getName(), objectKey, objectMD, params,
log, err =>
next(err, bucket, objectMD));
log, err => next(err, bucket, objectMD));
},
(bucket, objectMD, next) =>
// if external backend handles tagging
data.objectTagging('Put', objectKey, bucket, objectMD,
log, err => next(err, bucket, objectMD)),
// if external backend handles tagging
(bucket, objectMD, next) => data.objectTagging('Put', objectKey, bucket, objectMD,
log, err => next(err, bucket, objectMD)),
], (err, bucket, objectMD) => {
const additionalResHeaders = collectCorsHeaders(request.headers.origin,
request.method, bucket);

@@ -110,8 +107,7 @@ function objectPutTagging(authInfo, request, log, callback) {
location: objectMD ? objectMD.dataStoreName : undefined,
});
const verCfg = bucket.getVersioningConfiguration();
additionalResHeaders['x-amz-version-id'] =
getVersionIdResHeader(verCfg, objectMD);
additionalResHeaders['x-amz-version-id'] = getVersionIdResHeader(verCfg, objectMD);
}
return callback(err, additionalResHeaders);
});
@ -21,12 +21,13 @@ const { pushMetric } = require('../utapi/utilities');
|
|||
* @param {string} objectKey - object key from request (or as translated in
|
||||
* websiteGet)
|
||||
* @param {object} corsHeaders - CORS-related response headers
|
||||
* @param {object} request - normalized request object
|
||||
* @param {object} log - Werelogs instance
|
||||
* @param {function} callback - callback to function in route
|
||||
* @return {undefined}
|
||||
*/
|
||||
function _errorActions(err, errorDocument, routingRules,
|
||||
bucket, objectKey, corsHeaders, log, callback) {
|
||||
bucket, objectKey, corsHeaders, request, log, callback) {
|
||||
const bucketName = bucket.getName();
|
||||
const errRoutingRule = findRoutingRule(routingRules,
|
||||
objectKey, err.code);
|
||||
|
@ -47,7 +48,7 @@ function _errorActions(err, errorDocument, routingRules,
|
|||
// return the default error message if the object is private
|
||||
// rather than sending a stored error file
|
||||
if (!isObjAuthorized(bucket, errObjMD, 'objectGet',
|
||||
constants.publicId, null, log)) {
|
||||
constants.publicId, null, log, null, request.actionImplicitDenies)) {
|
||||
log.trace('errorObj not authorized', { error: err });
|
||||
return callback(err, true, null, corsHeaders);
|
||||
}
|
||||
|
@ -144,7 +145,7 @@ function websiteGet(request, log, callback) {
|
|||
{ error: err });
|
||||
let returnErr = err;
|
||||
const bucketAuthorized = isBucketAuthorized(bucket,
|
||||
'bucketGet', constants.publicId, null, log, request);
|
||||
'bucketGet', constants.publicId, null, log, request, request.actionImplicitDenies);
|
||||
// if index object does not exist and bucket is private AWS
|
||||
// returns 403 - AccessDenied error.
|
||||
if (err.is.NoSuchKey && !bucketAuthorized) {
|
||||
|
@ -152,16 +153,16 @@ function websiteGet(request, log, callback) {
|
|||
}
|
||||
return _errorActions(returnErr,
|
||||
websiteConfig.getErrorDocument(), routingRules,
|
||||
bucket, reqObjectKey, corsHeaders, log,
|
||||
bucket, reqObjectKey, corsHeaders, request, log,
|
||||
callback);
|
||||
}
|
||||
if (!isObjAuthorized(bucket, objMD, 'objectGet',
|
||||
constants.publicId, null, log, request)) {
|
||||
constants.publicId, null, log, request, request.actionImplicitDenies)) {
|
||||
const err = errors.AccessDenied;
|
||||
log.trace('request not authorized', { error: err });
|
||||
return _errorActions(err, websiteConfig.getErrorDocument(),
|
||||
routingRules, bucket,
|
||||
reqObjectKey, corsHeaders, log, callback);
|
||||
reqObjectKey, corsHeaders, request, log, callback);
|
||||
}
|
||||
|
||||
const headerValResult = validateHeaders(request.headers,
|
||||
|
@ -171,7 +172,7 @@ function websiteGet(request, log, callback) {
|
|||
log.trace('header validation error', { error: err });
|
||||
return _errorActions(err, websiteConfig.getErrorDocument(),
|
||||
routingRules, bucket, reqObjectKey,
|
||||
corsHeaders, log, callback);
|
||||
corsHeaders, request, log, callback);
|
||||
}
|
||||
// check if object to serve has website redirect header
|
||||
// Note: AWS prioritizes website configuration rules over
|
||||
|
|
|
@ -73,13 +73,6 @@ function metadataGetBucketAndObject(requestType, bucketName, objectKey,
|
|||
});
|
||||
return cb(errors.NoSuchBucket);
|
||||
}
|
||||
if (bucketShield(bucket, requestType)) {
|
||||
log.debug('bucket is shielded from request', {
|
||||
requestType,
|
||||
method: 'metadataGetBucketAndObject',
|
||||
});
|
||||
return cb(errors.NoSuchBucket);
|
||||
}
|
||||
log.trace('found bucket in metadata');
|
||||
return cb(null, bucket, obj);
|
||||
});
|
||||
|
@ -117,6 +110,53 @@ function metadataGetObject(bucketName, objectKey, versionId, log, cb) {
|
|||
return cb(null, objMD);
|
||||
});
|
||||
}
|
||||
/**
|
||||
* Validate that a bucket is accessible and authorized to the user,
|
||||
* return a specific error code otherwise
|
||||
*
|
||||
* @param {BucketInfo} bucket - bucket info
|
||||
* @param {object} params - function parameters
|
||||
* @param {AuthInfo} params.authInfo - AuthInfo class instance, requester's info
|
||||
* @param {string} params.requestType - type of request
|
||||
* @param {string} [params.preciseRequestType] - precise type of request
|
||||
* @param {object} params.request - http request object
|
||||
* @param {RequestLogger} log - request logger
|
||||
* @param {object} actionImplicitDenies - identity authorization results
|
||||
* @return {ArsenalError|null} returns a validation error, or null if validation OK
|
||||
* The following errors may be returned:
|
||||
* - NoSuchBucket: bucket is shielded
|
||||
* - MethodNotAllowed: requester is not bucket owner and asking for a
|
||||
* bucket policy operation
|
||||
* - AccessDenied: bucket is not authorized
|
||||
*/
|
||||
function validateBucket(bucket, params, log, actionImplicitDenies = {}) {
|
||||
const { authInfo, preciseRequestType, request } = params;
|
||||
let requestType = params.requestType;
|
||||
if (bucketShield(bucket, requestType)) {
|
||||
log.debug('bucket is shielded from request', {
|
||||
requestType,
|
||||
method: 'validateBucket',
|
||||
});
|
||||
return errors.NoSuchBucket;
|
||||
}
|
||||
// if requester is not bucket owner, bucket policy actions should be denied with
|
||||
// MethodNotAllowed error
|
||||
const onlyOwnerAllowed = ['bucketDeletePolicy', 'bucketGetPolicy', 'bucketPutPolicy'];
|
||||
const canonicalID = authInfo.getCanonicalID();
|
||||
if (!Array.isArray(requestType)) {
|
||||
requestType = [requestType];
|
||||
}
|
||||
if (bucket.getOwner() !== canonicalID && requestType.some(type => onlyOwnerAllowed.includes(type))) {
|
||||
return errors.MethodNotAllowed;
|
||||
}
|
||||
if (!isBucketAuthorized(bucket, (preciseRequestType || requestType), canonicalID,
|
||||
authInfo, log, request, actionImplicitDenies)) {
|
||||
log.debug('access denied for user on bucket', { requestType });
|
||||
return errors.AccessDenied;
|
||||
}
|
||||
return null;
|
||||
}
|
||||
|
||||
|
||||
/** metadataValidateBucketAndObj - retrieve bucket and object md from metadata
|
||||
* and check if user is authorized to access them.
|
||||
|
@ -127,41 +167,86 @@ function metadataGetObject(bucketName, objectKey, versionId, log, cb) {
|
|||
* @param {string} [params.versionId] - version id if getting specific version
|
||||
* @param {string} params.requestType - type of request
|
||||
* @param {object} params.request - http request object
|
||||
* @param {boolean} actionImplicitDenies - identity authorization results
|
||||
* @param {RequestLogger} log - request logger
|
||||
* @param {function} callback - callback
|
||||
* @return {undefined} - and call callback with params err, bucket md
|
||||
*/
|
||||
function metadataValidateBucketAndObj(params, log, callback) {
|
||||
const { authInfo, bucketName, objectKey, versionId, requestType, preciseRequestType, request } = params;
|
||||
const canonicalID = authInfo.getCanonicalID();
|
||||
function metadataValidateBucketAndObjNew(params, actionImplicitDenies, log, callback) {
|
||||
const { authInfo, bucketName, objectKey, versionId, request } = params;
|
||||
let requestType = params.requestType;
|
||||
if (!Array.isArray(requestType)) {
|
||||
requestType = [requestType];
|
||||
}
|
||||
async.waterfall([
|
||||
function getBucketAndObjectMD(next) {
|
||||
return metadataGetBucketAndObject(requestType, bucketName,
|
||||
objectKey, versionId, log, next);
|
||||
},
|
||||
function checkBucketAuth(bucket, objMD, next) {
|
||||
// if requester is not bucket owner, bucket policy actions should be denied with
|
||||
// MethodNotAllowed error
|
||||
const onlyOwnerAllowed = ['bucketDeletePolicy', 'bucketGetPolicy', 'bucketPutPolicy'];
|
||||
if (bucket.getOwner() !== canonicalID && onlyOwnerAllowed.includes(requestType)) {
|
||||
return next(errors.MethodNotAllowed, bucket);
|
||||
next => metadataGetBucketAndObject(requestType, bucketName,
|
||||
objectKey, versionId, log, (err, bucket, objMD) => {
|
||||
if (err) {
|
||||
if (actionImplicitDenies && Object.values(actionImplicitDenies).some(v => v === true)) {
|
||||
return next(errors.AccessDenied);
|
||||
}
|
||||
return next(err);
|
||||
}
|
||||
return next(null, bucket, objMD);
|
||||
}),
|
||||
(bucket, objMD, next) => {
|
||||
const validationError = validateBucket(bucket, params, log, actionImplicitDenies);
|
||||
if (validationError) {
|
||||
return next(validationError, bucket);
|
||||
}
|
||||
if (!isBucketAuthorized(bucket, (preciseRequestType || requestType), canonicalID,
|
||||
authInfo, log, request)) {
|
||||
log.debug('access denied for user on bucket', { requestType });
|
||||
return next(errors.AccessDenied, bucket);
|
||||
}
|
||||
return next(null, bucket, objMD);
|
||||
},
|
||||
function handleNullVersionGet(bucket, objMD, next) {
|
||||
if (objMD && versionId === 'null') {
|
||||
return getNullVersion(objMD, bucketName, objectKey, log,
|
||||
(err, nullVer) => next(err, bucket, nullVer));
|
||||
}
|
||||
return next(null, bucket, objMD);
|
||||
},
|
||||
function checkObjectAuth(bucket, objMD, next) {
|
||||
if (!isObjAuthorized(bucket, objMD, requestType, canonicalID, authInfo, log, request)) {
|
||||
(bucket, objMD, next) => {
|
||||
const canonicalID = authInfo.getCanonicalID();
|
||||
if (!isObjAuthorized(bucket, objMD, requestType, canonicalID, authInfo,
|
||||
log, request, actionImplicitDenies)) {
|
||||
log.debug('access denied for user on object', { requestType });
|
||||
return next(errors.AccessDenied, bucket);
|
||||
}
|
||||
return next(null, bucket, objMD);
|
||||
},
|
||||
], (err, bucket, objMD) => {
|
||||
if (err) {
|
||||
return callback(err, bucket);
|
||||
}
|
||||
return callback(null, bucket, objMD);
|
||||
});
|
||||
}
|
||||
|
||||
|
||||
function metadataValidateBucketAndObj(params, log, callback) {
|
||||
const { authInfo, bucketName, objectKey, versionId, request } = params;
|
||||
let requestType = params.requestType;
|
||||
if (!Array.isArray(requestType)) {
|
||||
requestType = [requestType];
|
||||
}
|
||||
async.waterfall([
|
||||
next => metadataGetBucketAndObject(requestType, bucketName,
|
||||
objectKey, versionId, log, (err, bucket, objMD) => {
|
||||
if (err) {
|
||||
return next(err);
|
||||
}
|
||||
return next(null, bucket, objMD);
|
||||
}),
|
||||
(bucket, objMD, next) => {
|
||||
const validationError = validateBucket(bucket, params, log);
|
||||
if (validationError) {
|
||||
return next(validationError, bucket);
|
||||
}
|
||||
if (objMD && versionId === 'null') {
|
||||
return getNullVersion(objMD, bucketName, objectKey, log,
|
||||
(err, nullVer) => next(err, bucket, nullVer));
|
||||
}
|
||||
return next(null, bucket, objMD);
|
||||
},
|
||||
(bucket, objMD, next) => {
|
||||
const canonicalID = authInfo.getCanonicalID();
|
||||
if (!isObjAuthorized(bucket, objMD, requestType, canonicalID, authInfo,
|
||||
log, request)) {
|
||||
log.debug('access denied for user on object', { requestType });
|
||||
return next(errors.AccessDenied, bucket);
|
||||
}
|
||||
|
@ -175,7 +260,6 @@ function metadataValidateBucketAndObj(params, log, callback) {
|
|||
return callback(null, bucket, objMD);
|
||||
});
|
||||
}
|
||||
|
||||
/** metadataGetBucket - retrieves bucket from metadata, returning error if
|
||||
* bucket is shielded
|
||||
* @param {string} requestType - type of request
|
||||
|
@ -209,34 +293,38 @@ function metadataGetBucket(requestType, bucketName, log, cb) {
|
|||
* @param {string} params.bucketName - name of bucket
|
||||
* @param {string} params.requestType - type of request
|
||||
* @param {string} params.request - http request object
|
||||
* @param {boolean} actionImplicitDenies - identity authorization results
|
||||
* @param {RequestLogger} log - request logger
|
||||
* @param {function} callback - callback
|
||||
* @return {undefined} - and call callback with params err, bucket md
|
||||
*/
|
||||
function metadataValidateBucket(params, log, callback) {
|
||||
const { authInfo, bucketName, requestType, preciseRequestType, request } = params;
|
||||
const canonicalID = authInfo.getCanonicalID();
|
||||
function metadataValidateBucketNew(params, actionImplicitDenies, log, callback) {
|
||||
const { bucketName, requestType } = params;
|
||||
return metadataGetBucket(requestType, bucketName, log, (err, bucket) => {
|
||||
if (err) {
|
||||
return callback(err);
|
||||
}
|
||||
// if requester is not bucket owner, bucket policy actions should be denied with
|
||||
// MethodNotAllowed error
|
||||
const onlyOwnerAllowed = ['bucketDeletePolicy', 'bucketGetPolicy', 'bucketPutPolicy'];
|
||||
if (bucket.getOwner() !== canonicalID && onlyOwnerAllowed.includes(requestType)) {
|
||||
return callback(errors.MethodNotAllowed, bucket);
|
||||
const validationError = validateBucket(bucket, params, log, actionImplicitDenies);
|
||||
return callback(validationError, bucket);
|
||||
});
|
||||
}
|
||||
|
||||
function metadataValidateBucket(params, log, callback) {
|
||||
const { bucketName, requestType } = params;
|
||||
return metadataGetBucket(requestType, bucketName, log, (err, bucket) => {
|
||||
if (err) {
|
||||
return callback(err);
|
||||
}
|
||||
// still return bucket for cors headers
|
||||
if (!isBucketAuthorized(bucket, (preciseRequestType || requestType), canonicalID, authInfo, log, request)) {
|
||||
log.debug('access denied for user on bucket', { requestType });
|
||||
return callback(errors.AccessDenied, bucket);
|
||||
}
|
||||
return callback(null, bucket);
|
||||
const validationError = validateBucket(bucket, params, log);
|
||||
return callback(validationError, bucket);
|
||||
});
|
||||
}
|
||||
|
||||
module.exports = {
|
||||
metadataGetObject,
|
||||
validateBucket,
|
||||
metadataValidateBucketAndObj,
|
||||
metadataValidateBucketAndObjNew,
|
||||
metadataValidateBucket,
|
||||
metadataValidateBucketNew,
|
||||
};
|
||||
|
|
|
@ -20,7 +20,7 @@
|
|||
"homepage": "https://github.com/scality/S3#readme",
|
||||
"dependencies": {
|
||||
"@hapi/joi": "^17.1.0",
|
||||
"arsenal": "git+https://github.com/scality/arsenal#7.10.48",
|
||||
"arsenal": "git+https://github.com/scality/arsenal#7.10.49",
|
||||
"async": "~2.5.0",
|
||||
"aws-sdk": "2.905.0",
|
||||
"azure-storage": "^2.1.0",
|
||||
|
|
|
@ -3,21 +3,19 @@ const async = require('async');
|
|||
const { parseString } = require('xml2js');
|
||||
const AWS = require('aws-sdk');
|
||||
|
||||
const { cleanup, DummyRequestLogger, makeAuthInfo }
|
||||
= require('../unit/helpers');
|
||||
const { metadata } = require('arsenal').storage.metadata.inMemory.metadata;
|
||||
const { cleanup, DummyRequestLogger, makeAuthInfo } = require('../unit/helpers');
|
||||
const { ds } = require('arsenal').storage.data.inMemory.datastore;
|
||||
const { bucketPut } = require('../../lib/api/bucketPut');
|
||||
const initiateMultipartUpload
|
||||
= require('../../lib/api/initiateMultipartUpload');
|
||||
const initiateMultipartUpload = require('../../lib/api/initiateMultipartUpload');
|
||||
const objectPut = require('../../lib/api/objectPut');
|
||||
const objectPutCopyPart = require('../../lib/api/objectPutCopyPart');
|
||||
const DummyRequest = require('../unit/DummyRequest');
|
||||
const { metadata } = require('arsenal').storage.metadata.inMemory.metadata;
|
||||
const constants = require('../../constants');
|
||||
|
||||
const s3 = new AWS.S3();
|
||||
|
||||
const splitter = constants.splitter;
|
||||
const { splitter } = constants;
|
||||
const log = new DummyRequestLogger();
|
||||
const canonicalID = 'accessKey1';
|
||||
const authInfo = makeAuthInfo(canonicalID);
|
||||
|
@ -56,14 +54,14 @@ function getAwsParamsBucketMismatch(destObjName, uploadId) {
|
|||
}
|
||||
|
||||
function copyPutPart(bucketLoc, mpuLoc, srcObjLoc, requestHost, cb,
|
||||
errorPutCopyPart) {
|
||||
errorPutCopyPart) {
|
||||
const keys = getSourceAndDestKeys();
|
||||
const { sourceObjName, destObjName } = keys;
|
||||
const post = bucketLoc ? '<?xml version="1.0" encoding="UTF-8"?>' +
|
||||
'<CreateBucketConfiguration ' +
|
||||
'xmlns="http://s3.amazonaws.com/doc/2006-03-01/">' +
|
||||
`<LocationConstraint>${bucketLoc}</LocationConstraint>` +
|
||||
'</CreateBucketConfiguration>' : '';
|
||||
const post = bucketLoc ? '<?xml version="1.0" encoding="UTF-8"?>'
|
||||
+ '<CreateBucketConfiguration '
|
||||
+ 'xmlns="http://s3.amazonaws.com/doc/2006-03-01/">'
|
||||
+ `<LocationConstraint>${bucketLoc}</LocationConstraint>`
|
||||
+ '</CreateBucketConfiguration>' : '';
|
||||
const bucketPutReq = new DummyRequest({
|
||||
bucketName,
|
||||
namespace,
|
||||
|
@ -80,10 +78,13 @@ errorPutCopyPart) {
|
|||
objectKey: destObjName,
|
||||
headers: { host: `${bucketName}.s3.amazonaws.com` },
|
||||
url: `/${destObjName}?uploads`,
|
||||
actionImplicitDenies: false,
|
||||
};
|
||||
if (mpuLoc) {
|
||||
initiateReq.headers = { 'host': `${bucketName}.s3.amazonaws.com`,
|
||||
'x-amz-meta-scal-location-constraint': `${mpuLoc}` };
|
||||
initiateReq.headers = {
|
||||
'host': `${bucketName}.s3.amazonaws.com`,
|
||||
'x-amz-meta-scal-location-constraint': `${mpuLoc}`,
|
||||
};
|
||||
}
|
||||
if (requestHost) {
|
||||
initiateReq.parsedHost = requestHost;
|
||||
|
@ -94,10 +95,13 @@ errorPutCopyPart) {
|
|||
objectKey: sourceObjName,
|
||||
headers: { host: `${bucketName}.s3.amazonaws.com` },
|
||||
url: '/',
|
||||
actionImplicitDenies: false,
|
||||
};
|
||||
if (srcObjLoc) {
|
||||
sourceObjPutParams.headers = { 'host': `${bucketName}.s3.amazonaws.com`,
|
||||
'x-amz-meta-scal-location-constraint': `${srcObjLoc}` };
|
||||
sourceObjPutParams.headers = {
|
||||
'host': `${bucketName}.s3.amazonaws.com`,
|
||||
'x-amz-meta-scal-location-constraint': `${srcObjLoc}`,
|
||||
};
|
||||
}
|
||||
const sourceObjPutReq = new DummyRequest(sourceObjPutParams, body);
|
||||
if (requestHost) {
|
||||
|
@ -112,8 +116,7 @@ errorPutCopyPart) {
|
|||
});
|
||||
},
|
||||
next => {
|
||||
objectPut(authInfo, sourceObjPutReq, undefined, log, err =>
|
||||
next(err));
|
||||
objectPut(authInfo, sourceObjPutReq, undefined, log, err => next(err));
|
||||
},
|
||||
next => {
|
||||
initiateMultipartUpload(authInfo, initiateReq, log, next);
|
||||
|
@ -130,8 +133,8 @@ errorPutCopyPart) {
|
|||
// Need to build request in here since do not have
|
||||
// uploadId until here
|
||||
assert.ifError(err, 'Error putting source object or initiate MPU');
|
||||
const testUploadId = json.InitiateMultipartUploadResult.
|
||||
UploadId[0];
|
||||
const testUploadId = json.InitiateMultipartUploadResult
|
||||
.UploadId[0];
|
||||
const copyPartParams = {
|
||||
bucketName,
|
||||
namespace,
|
||||
|
@ -172,137 +175,137 @@ function assertPartList(partList, uploadId) {
|
|||
}
|
||||
|
||||
describeSkipIfE2E('ObjectCopyPutPart API with multiple backends',
|
||||
function testSuite() {
|
||||
this.timeout(60000);
|
||||
function testSuite() {
|
||||
this.timeout(60000);
|
||||
|
||||
beforeEach(() => {
|
||||
cleanup();
|
||||
});
|
||||
beforeEach(() => {
|
||||
cleanup();
|
||||
});
|
||||
|
||||
it('should copy part to mem based on mpu location', done => {
|
||||
copyPutPart(fileLocation, memLocation, null, 'localhost', () => {
|
||||
it('should copy part to mem based on mpu location', done => {
|
||||
copyPutPart(fileLocation, memLocation, null, 'localhost', () => {
|
||||
// object info is stored in ds beginning at index one,
|
||||
// so an array length of two means only one object
|
||||
// was stored in mem
|
||||
assert.strictEqual(ds.length, 2);
|
||||
assert.deepStrictEqual(ds[1].value, body);
|
||||
done();
|
||||
});
|
||||
});
|
||||
|
||||
it('should copy part to file based on mpu location', done => {
|
||||
copyPutPart(memLocation, fileLocation, null, 'localhost', () => {
|
||||
assert.strictEqual(ds.length, 2);
|
||||
done();
|
||||
});
|
||||
});
|
||||
|
||||
it('should copy part to AWS based on mpu location', done => {
|
||||
copyPutPart(memLocation, awsLocation, null, 'localhost',
|
||||
(keys, uploadId) => {
|
||||
assert.strictEqual(ds.length, 2);
|
||||
const awsReq = getAwsParams(keys.destObjName, uploadId);
|
||||
s3.listParts(awsReq, (err, partList) => {
|
||||
assertPartList(partList, uploadId);
|
||||
s3.abortMultipartUpload(awsReq, err => {
|
||||
assert.equal(err, null, `Error aborting MPU: ${err}. ` +
|
||||
`You must abort MPU with upload ID ${uploadId} manually.`);
|
||||
done();
|
||||
});
|
||||
assert.strictEqual(ds.length, 2);
|
||||
assert.deepStrictEqual(ds[1].value, body);
|
||||
done();
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
it('should copy part to mem from AWS based on mpu location', done => {
|
||||
copyPutPart(awsLocation, memLocation, null, 'localhost', () => {
|
||||
assert.strictEqual(ds.length, 2);
|
||||
assert.deepStrictEqual(ds[1].value, body);
|
||||
done();
|
||||
it('should copy part to file based on mpu location', done => {
|
||||
copyPutPart(memLocation, fileLocation, null, 'localhost', () => {
|
||||
assert.strictEqual(ds.length, 2);
|
||||
done();
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
it('should copy part to mem based on bucket location', done => {
|
||||
copyPutPart(memLocation, null, null, 'localhost', () => {
|
||||
it('should copy part to AWS based on mpu location', done => {
|
||||
copyPutPart(memLocation, awsLocation, null, 'localhost',
|
||||
(keys, uploadId) => {
|
||||
assert.strictEqual(ds.length, 2);
|
||||
const awsReq = getAwsParams(keys.destObjName, uploadId);
|
||||
s3.listParts(awsReq, (err, partList) => {
|
||||
assertPartList(partList, uploadId);
|
||||
s3.abortMultipartUpload(awsReq, err => {
|
||||
assert.equal(err, null, `Error aborting MPU: ${err}. `
|
||||
+ `You must abort MPU with upload ID ${uploadId} manually.`);
|
||||
done();
|
||||
});
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
it('should copy part to mem from AWS based on mpu location', done => {
|
||||
copyPutPart(awsLocation, memLocation, null, 'localhost', () => {
|
||||
assert.strictEqual(ds.length, 2);
|
||||
assert.deepStrictEqual(ds[1].value, body);
|
||||
done();
|
||||
});
|
||||
});
|
||||
|
||||
it('should copy part to mem based on bucket location', done => {
|
||||
copyPutPart(memLocation, null, null, 'localhost', () => {
|
||||
// ds length should be three because both source
|
||||
// and copied objects should be in mem
|
||||
assert.strictEqual(ds.length, 3);
|
||||
assert.deepStrictEqual(ds[2].value, body);
|
||||
done();
|
||||
assert.strictEqual(ds.length, 3);
|
||||
assert.deepStrictEqual(ds[2].value, body);
|
||||
done();
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
it('should copy part to file based on bucket location', done => {
|
||||
copyPutPart(fileLocation, null, null, 'localhost', () => {
|
||||
it('should copy part to file based on bucket location', done => {
|
||||
copyPutPart(fileLocation, null, null, 'localhost', () => {
|
||||
// ds should be empty because both source and
|
||||
// coped objects should be in file
|
||||
assert.deepStrictEqual(ds, []);
|
||||
done();
|
||||
assert.deepStrictEqual(ds, []);
|
||||
done();
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
it('should copy part to AWS based on bucket location', done => {
|
||||
copyPutPart(awsLocation, null, null, 'localhost', (keys, uploadId) => {
|
||||
assert.deepStrictEqual(ds, []);
|
||||
const awsReq = getAwsParams(keys.destObjName, uploadId);
|
||||
s3.listParts(awsReq, (err, partList) => {
|
||||
assertPartList(partList, uploadId);
|
||||
s3.abortMultipartUpload(awsReq, err => {
|
||||
assert.equal(err, null, `Error aborting MPU: ${err}. ` +
|
||||
`You must abort MPU with upload ID ${uploadId} manually.`);
|
||||
done();
|
||||
it('should copy part to AWS based on bucket location', done => {
|
||||
copyPutPart(awsLocation, null, null, 'localhost', (keys, uploadId) => {
|
||||
assert.deepStrictEqual(ds, []);
|
||||
const awsReq = getAwsParams(keys.destObjName, uploadId);
|
||||
s3.listParts(awsReq, (err, partList) => {
|
||||
assertPartList(partList, uploadId);
|
||||
s3.abortMultipartUpload(awsReq, err => {
|
||||
assert.equal(err, null, `Error aborting MPU: ${err}. `
|
||||
+ `You must abort MPU with upload ID ${uploadId} manually.`);
|
||||
done();
|
||||
});
|
||||
});
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
it('should copy part an object on AWS location that has bucketMatch ' +
|
||||
'equals false to a mpu with a different AWS location', done => {
|
||||
copyPutPart(null, awsLocation, awsLocationMismatch, 'localhost',
|
||||
(keys, uploadId) => {
|
||||
assert.deepStrictEqual(ds, []);
|
||||
const awsReq = getAwsParams(keys.destObjName, uploadId);
|
||||
s3.listParts(awsReq, (err, partList) => {
|
||||
assertPartList(partList, uploadId);
|
||||
s3.abortMultipartUpload(awsReq, err => {
|
||||
assert.equal(err, null, `Error aborting MPU: ${err}. ` +
|
||||
`You must abort MPU with upload ID ${uploadId} manually.`);
|
||||
done();
|
||||
it('should copy part an object on AWS location that has bucketMatch '
|
||||
+ 'equals false to a mpu with a different AWS location', done => {
|
||||
copyPutPart(null, awsLocation, awsLocationMismatch, 'localhost',
|
||||
(keys, uploadId) => {
|
||||
assert.deepStrictEqual(ds, []);
|
||||
const awsReq = getAwsParams(keys.destObjName, uploadId);
|
||||
s3.listParts(awsReq, (err, partList) => {
|
||||
assertPartList(partList, uploadId);
|
||||
s3.abortMultipartUpload(awsReq, err => {
|
||||
assert.equal(err, null, `Error aborting MPU: ${err}. `
|
||||
+ `You must abort MPU with upload ID ${uploadId} manually.`);
|
||||
done();
|
||||
});
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
it('should copy part an object on AWS to a mpu with a different '
|
||||
+ 'AWS location that has bucketMatch equals false', done => {
|
||||
copyPutPart(null, awsLocationMismatch, awsLocation, 'localhost',
|
||||
(keys, uploadId) => {
|
||||
assert.deepStrictEqual(ds, []);
|
||||
const awsReq = getAwsParamsBucketMismatch(keys.destObjName,
|
||||
uploadId);
|
||||
s3.listParts(awsReq, (err, partList) => {
|
||||
assertPartList(partList, uploadId);
|
||||
s3.abortMultipartUpload(awsReq, err => {
|
||||
assert.equal(err, null, `Error aborting MPU: ${err}. `
|
||||
+ `You must abort MPU with upload ID ${uploadId} manually.`);
|
||||
done();
|
||||
});
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
it('should return error 403 AccessDenied copying part to a '
|
||||
+ 'different AWS location without object READ access',
|
||||
done => {
|
||||
const errorPutCopyPart = { code: 'AccessDenied', statusCode: 403 };
|
||||
copyPutPart(null, awsLocation, awsLocation2, 'localhost', done,
|
||||
errorPutCopyPart);
|
||||
});
|
||||
|
||||
|
||||
it('should copy part to file based on request endpoint', done => {
|
||||
copyPutPart(null, null, memLocation, 'localhost', () => {
|
||||
assert.strictEqual(ds.length, 2);
|
||||
done();
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
it('should copy part an object on AWS to a mpu with a different ' +
|
||||
'AWS location that has bucketMatch equals false', done => {
|
||||
copyPutPart(null, awsLocationMismatch, awsLocation, 'localhost',
|
||||
(keys, uploadId) => {
|
||||
assert.deepStrictEqual(ds, []);
|
||||
const awsReq = getAwsParamsBucketMismatch(keys.destObjName,
|
||||
uploadId);
|
||||
s3.listParts(awsReq, (err, partList) => {
|
||||
assertPartList(partList, uploadId);
|
||||
s3.abortMultipartUpload(awsReq, err => {
|
||||
assert.equal(err, null, `Error aborting MPU: ${err}. ` +
|
||||
`You must abort MPU with upload ID ${uploadId} manually.`);
|
||||
done();
|
||||
});
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
it('should return error 403 AccessDenied copying part to a ' +
|
||||
'different AWS location without object READ access',
|
||||
done => {
|
||||
const errorPutCopyPart = { code: 'AccessDenied', statusCode: 403 };
|
||||
copyPutPart(null, awsLocation, awsLocation2, 'localhost', done,
|
||||
errorPutCopyPart);
|
||||
});
|
||||
|
||||
|
||||
it('should copy part to file based on request endpoint', done => {
|
||||
copyPutPart(null, null, memLocation, 'localhost', () => {
|
||||
assert.strictEqual(ds.length, 2);
|
||||
done();
|
||||
});
|
||||
});
|
||||
});
|
||||
|
|
|
@ -3,20 +3,17 @@ const async = require('async');
|
|||
const crypto = require('crypto');
|
||||
const { parseString } = require('xml2js');
|
||||
const AWS = require('aws-sdk');
|
||||
const { metadata } = require('arsenal').storage.metadata.inMemory.metadata;
|
||||
const { config } = require('../../lib/Config');
|
||||
const { cleanup, DummyRequestLogger, makeAuthInfo }
|
||||
= require('../unit/helpers');
|
||||
const { cleanup, DummyRequestLogger, makeAuthInfo } = require('../unit/helpers');
|
||||
const { ds } = require('arsenal').storage.data.inMemory.datastore;
|
||||
const { bucketPut } = require('../../lib/api/bucketPut');
|
||||
const initiateMultipartUpload
|
||||
= require('../../lib/api/initiateMultipartUpload');
|
||||
const initiateMultipartUpload = require('../../lib/api/initiateMultipartUpload');
|
||||
const objectPutPart = require('../../lib/api/objectPutPart');
|
||||
const DummyRequest = require('../unit/DummyRequest');
|
||||
const { metadata } = require('arsenal').storage.metadata.inMemory.metadata;
|
||||
const mdWrapper = require('../../lib/metadata/wrapper');
|
||||
const constants = require('../../constants');
|
||||
const { getRealAwsConfig } =
|
||||
require('../functional/aws-node-sdk/test/support/awsConfig');
|
||||
const { getRealAwsConfig } = require('../functional/aws-node-sdk/test/support/awsConfig');
|
||||
|
||||
const memLocation = 'scality-internal-mem';
|
||||
const fileLocation = 'scality-internal-file';
|
||||
|
@ -25,7 +22,7 @@ const awsLocationMismatch = 'awsbackendmismatch';
|
|||
const awsConfig = getRealAwsConfig(awsLocation);
|
||||
const s3 = new AWS.S3(awsConfig);
|
||||
|
||||
const splitter = constants.splitter;
|
||||
const { splitter } = constants;
|
||||
const log = new DummyRequestLogger();
|
||||
const canonicalID = 'accessKey1';
|
||||
const authInfo = makeAuthInfo(canonicalID);
|
||||
|
@ -47,13 +44,13 @@ function _getOverviewKey(objectKey, uploadId) {
|
|||
}
|
||||
|
||||
function putPart(bucketLoc, mpuLoc, requestHost, cb,
|
||||
errorDescription) {
|
||||
errorDescription) {
|
||||
const objectName = `objectName-${Date.now()}`;
|
||||
const post = bucketLoc ? '<?xml version="1.0" encoding="UTF-8"?>' +
|
||||
'<CreateBucketConfiguration ' +
|
||||
'xmlns="http://s3.amazonaws.com/doc/2006-03-01/">' +
|
||||
`<LocationConstraint>${bucketLoc}</LocationConstraint>` +
|
||||
'</CreateBucketConfiguration>' : '';
|
||||
const post = bucketLoc ? '<?xml version="1.0" encoding="UTF-8"?>'
|
||||
+ '<CreateBucketConfiguration '
|
||||
+ 'xmlns="http://s3.amazonaws.com/doc/2006-03-01/">'
|
||||
+ `<LocationConstraint>${bucketLoc}</LocationConstraint>`
|
||||
+ '</CreateBucketConfiguration>' : '';
|
||||
const bucketPutReq = {
|
||||
bucketName,
|
||||
namespace,
|
||||
|
@ -70,10 +67,13 @@ errorDescription) {
|
|||
objectKey: objectName,
|
||||
headers: { host: `${bucketName}.s3.amazonaws.com` },
|
||||
url: `/${objectName}?uploads`,
|
||||
actionImplicitDenies: false,
|
||||
};
|
||||
if (mpuLoc) {
|
||||
initiateReq.headers = { 'host': `${bucketName}.s3.amazonaws.com`,
|
||||
'x-amz-meta-scal-location-constraint': `${mpuLoc}` };
|
||||
initiateReq.headers = {
|
||||
'host': `${bucketName}.s3.amazonaws.com`,
|
||||
'x-amz-meta-scal-location-constraint': `${mpuLoc}`,
|
||||
};
|
||||
}
|
||||
if (requestHost) {
|
||||
initiateReq.parsedHost = requestHost;
|
||||
|
@ -123,9 +123,9 @@ errorDescription) {
|
|||
const partReq = new DummyRequest(partReqParams, body1);
|
||||
return objectPutPart(authInfo, partReq, undefined, log, err => {
|
||||
assert.strictEqual(err, null);
|
||||
if (bucketLoc !== awsLocation && mpuLoc !== awsLocation &&
|
||||
bucketLoc !== awsLocationMismatch &&
|
||||
mpuLoc !== awsLocationMismatch) {
|
||||
if (bucketLoc !== awsLocation && mpuLoc !== awsLocation
|
||||
&& bucketLoc !== awsLocationMismatch
|
||||
&& mpuLoc !== awsLocationMismatch) {
|
||||
const keysInMPUkeyMap = [];
|
||||
metadata.keyMaps.get(mpuBucket).forEach((val, key) => {
|
||||
keysInMPUkeyMap.push(key);
|
||||
|
@ -138,7 +138,7 @@ errorDescription) {
|
|||
});
|
||||
const partKey = sortedKeyMap[1];
|
||||
const partETag = metadata.keyMaps.get(mpuBucket)
|
||||
.get(partKey)['content-md5'];
|
||||
.get(partKey)['content-md5'];
|
||||
assert.strictEqual(keysInMPUkeyMap.length, 2);
|
||||
assert.strictEqual(partETag, calculatedHash1);
|
||||
}
|
||||
|
@ -148,8 +148,8 @@ errorDescription) {
|
|||
}
|
||||
|
||||
function listAndAbort(uploadId, calculatedHash2, objectName, done) {
|
||||
const awsBucket = config.locationConstraints[awsLocation].
|
||||
details.bucketName;
|
||||
const awsBucket = config.locationConstraints[awsLocation]
|
||||
.details.bucketName;
|
||||
const params = {
|
||||
Bucket: awsBucket,
|
||||
Key: objectName,
|
||||
|
@ -162,167 +162,169 @@ function listAndAbort(uploadId, calculatedHash2, objectName, done) {
|
|||
assert.strictEqual(`"${calculatedHash2}"`, data.Parts[0].ETag);
|
||||
}
|
||||
s3.abortMultipartUpload(params, err => {
|
||||
assert.equal(err, null, `Error aborting MPU: ${err}. ` +
|
||||
`You must abort MPU with upload ID ${uploadId} manually.`);
|
||||
assert.equal(err, null, `Error aborting MPU: ${err}. `
|
||||
+ `You must abort MPU with upload ID ${uploadId} manually.`);
|
||||
done();
|
||||
});
|
||||
});
|
||||
}
|
||||
|
||||
describeSkipIfE2E('objectPutPart API with multiple backends',
|
||||
function testSuite() {
|
||||
this.timeout(5000);
|
||||
function testSuite() {
|
||||
this.timeout(5000);
|
||||
|
||||
beforeEach(() => {
|
||||
cleanup();
|
||||
});
|
||||
beforeEach(() => {
|
||||
cleanup();
|
||||
});
|
||||
|
||||
it('should upload a part to file based on mpu location', done => {
|
||||
putPart(memLocation, fileLocation, 'localhost', () => {
|
||||
it('should upload a part to file based on mpu location', done => {
|
||||
putPart(memLocation, fileLocation, 'localhost', () => {
|
||||
// if ds is empty, the object is not in mem, which means it
|
||||
// must be in file because those are the only possibilities
|
||||
// for unit tests
|
||||
assert.deepStrictEqual(ds, []);
|
||||
done();
|
||||
});
|
||||
});
|
||||
|
||||
it('should put a part to mem based on mpu location', done => {
|
||||
putPart(fileLocation, memLocation, 'localhost', () => {
|
||||
assert.deepStrictEqual(ds[1].value, body1);
|
||||
done();
|
||||
});
|
||||
});
|
||||
|
||||
it('should put a part to AWS based on mpu location', done => {
|
||||
putPart(fileLocation, awsLocation, 'localhost',
|
||||
(objectName, uploadId) => {
|
||||
assert.deepStrictEqual(ds, []);
|
||||
listAndAbort(uploadId, null, objectName, done);
|
||||
});
|
||||
});
|
||||
|
||||
it('should replace part if two parts uploaded with same part number to AWS',
|
||||
done => {
|
||||
putPart(fileLocation, awsLocation, 'localhost',
|
||||
(objectName, uploadId) => {
|
||||
assert.deepStrictEqual(ds, []);
|
||||
const partReqParams = {
|
||||
bucketName,
|
||||
namespace,
|
||||
objectKey: objectName,
|
||||
headers: { 'host': `${bucketName}.s3.amazonaws.com`,
|
||||
'x-amz-meta-scal-location-constraint': awsLocation },
|
||||
url: `/${objectName}?partNumber=1&uploadId=${uploadId}`,
|
||||
query: {
|
||||
partNumber: '1', uploadId,
|
||||
},
|
||||
};
|
||||
const partReq = new DummyRequest(partReqParams, body2);
|
||||
objectPutPart(authInfo, partReq, undefined, log, err => {
|
||||
assert.equal(err, null, `Error putting second part: ${err}`);
|
||||
listAndAbort(uploadId, calculatedHash2, objectName, done);
|
||||
assert.deepStrictEqual(ds, []);
|
||||
done();
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
it('should upload part based on mpu location even if part ' +
|
||||
'location constraint is specified ', done => {
|
||||
putPart(fileLocation, memLocation, 'localhost', () => {
|
||||
assert.deepStrictEqual(ds[1].value, body1);
|
||||
done();
|
||||
it('should put a part to mem based on mpu location', done => {
|
||||
putPart(fileLocation, memLocation, 'localhost', () => {
|
||||
assert.deepStrictEqual(ds[1].value, body1);
|
||||
done();
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
it('should put a part to file based on bucket location', done => {
|
||||
putPart(fileLocation, null, 'localhost', () => {
|
||||
assert.deepStrictEqual(ds, []);
|
||||
done();
|
||||
});
|
||||
});
|
||||
|
||||
it('should put a part to mem based on bucket location', done => {
|
||||
putPart(memLocation, null, 'localhost', () => {
|
||||
assert.deepStrictEqual(ds[1].value, body1);
|
||||
done();
|
||||
});
|
||||
});
|
||||
|
||||
it('should put a part to AWS based on bucket location', done => {
|
||||
putPart(awsLocation, null, 'localhost',
|
||||
(objectName, uploadId) => {
|
||||
assert.deepStrictEqual(ds, []);
|
||||
listAndAbort(uploadId, null, objectName, done);
|
||||
});
|
||||
});
|
||||
|
||||
it('should put a part to AWS based on bucket location with bucketMatch ' +
|
||||
'set to true', done => {
|
||||
putPart(null, awsLocation, 'localhost',
|
||||
(objectName, uploadId) => {
|
||||
assert.deepStrictEqual(ds, []);
|
||||
listAndAbort(uploadId, null, objectName, done);
|
||||
});
|
||||
});
|
||||
|
||||
it('should put a part to AWS based on bucket location with bucketMatch ' +
|
||||
'set to false', done => {
|
||||
putPart(null, awsLocationMismatch, 'localhost',
|
||||
(objectName, uploadId) => {
|
||||
assert.deepStrictEqual(ds, []);
|
||||
listAndAbort(uploadId, null, `${bucketName}/${objectName}`, done);
|
||||
});
|
||||
});
|
||||
|
||||
it('should put a part to file based on request endpoint', done => {
|
||||
putPart(null, null, 'localhost', () => {
|
||||
assert.deepStrictEqual(ds, []);
|
||||
done();
|
||||
});
|
||||
});
|
||||
|
||||
it('should store a part even if the MPU was initiated on legacy version',
|
||||
done => {
|
||||
putPart('scality-internal-mem', null, 'localhost',
|
||||
(objectKey, uploadId) => {
|
||||
const mputOverviewKey = _getOverviewKey(objectKey, uploadId);
|
||||
mdWrapper.getObjectMD(mpuBucket, mputOverviewKey, {}, log,
|
||||
(err, res) => {
|
||||
// remove location constraint to mimic legacy behvior
|
||||
// eslint-disable-next-line no-param-reassign
|
||||
res.controllingLocationConstraint = undefined;
|
||||
const md5Hash = crypto.createHash('md5');
|
||||
const bufferBody = Buffer.from(body1);
|
||||
const calculatedHash = md5Hash.update(bufferBody).digest('hex');
|
||||
const partRequest = new DummyRequest({
|
||||
bucketName,
|
||||
namespace,
|
||||
objectKey,
|
||||
headers: { host: `${bucketName}.s3.amazonaws.com` },
|
||||
url: `/${objectKey}?partNumber=1&uploadId=${uploadId}`,
|
||||
query: { partNumber: '1', uploadId },
|
||||
calculatedHash,
|
||||
}, body1);
|
||||
objectPutPart(authInfo, partRequest, undefined, log, err => {
|
||||
assert.strictEqual(err, null);
|
||||
const keysInMPUkeyMap = [];
|
||||
metadata.keyMaps.get(mpuBucket).forEach((val, key) => {
|
||||
keysInMPUkeyMap.push(key);
|
||||
});
|
||||
const sortedKeyMap = keysInMPUkeyMap.sort(a => {
|
||||
if (a.slice(0, 8) === 'overview') {
|
||||
return -1;
|
||||
}
|
||||
return 0;
|
||||
});
|
||||
const partKey = sortedKeyMap[1];
|
||||
const partETag = metadata.keyMaps.get(mpuBucket)
|
||||
.get(partKey)['content-md5'];
|
||||
assert.strictEqual(keysInMPUkeyMap.length, 2);
|
||||
assert.strictEqual(partETag, calculatedHash);
|
||||
done();
|
||||
it('should put a part to AWS based on mpu location', done => {
|
||||
putPart(fileLocation, awsLocation, 'localhost',
|
||||
(objectName, uploadId) => {
|
||||
assert.deepStrictEqual(ds, []);
|
||||
listAndAbort(uploadId, null, objectName, done);
|
||||
});
|
||||
});
|
||||
|
||||
it('should replace part if two parts uploaded with same part number to AWS',
|
||||
done => {
|
||||
putPart(fileLocation, awsLocation, 'localhost',
|
||||
(objectName, uploadId) => {
|
||||
assert.deepStrictEqual(ds, []);
|
||||
const partReqParams = {
|
||||
bucketName,
|
||||
namespace,
|
||||
objectKey: objectName,
|
||||
headers: {
|
||||
'host': `${bucketName}.s3.amazonaws.com`,
|
||||
'x-amz-meta-scal-location-constraint': awsLocation,
|
||||
},
|
||||
url: `/${objectName}?partNumber=1&uploadId=${uploadId}`,
|
||||
query: {
|
||||
partNumber: '1', uploadId,
|
||||
},
|
||||
};
|
||||
const partReq = new DummyRequest(partReqParams, body2);
|
||||
objectPutPart(authInfo, partReq, undefined, log, err => {
|
||||
assert.equal(err, null, `Error putting second part: ${err}`);
|
||||
listAndAbort(uploadId, calculatedHash2, objectName, done);
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
it('should upload part based on mpu location even if part '
|
||||
+ 'location constraint is specified ', done => {
|
||||
putPart(fileLocation, memLocation, 'localhost', () => {
|
||||
assert.deepStrictEqual(ds[1].value, body1);
|
||||
done();
|
||||
});
|
||||
});
|
||||
|
||||
it('should put a part to file based on bucket location', done => {
|
||||
putPart(fileLocation, null, 'localhost', () => {
|
||||
assert.deepStrictEqual(ds, []);
|
||||
done();
|
||||
});
|
||||
});
|
||||
|
||||
it('should put a part to mem based on bucket location', done => {
|
||||
putPart(memLocation, null, 'localhost', () => {
|
||||
assert.deepStrictEqual(ds[1].value, body1);
|
||||
done();
|
||||
});
|
||||
});
|
||||
|
||||
it('should put a part to AWS based on bucket location', done => {
|
||||
putPart(awsLocation, null, 'localhost',
|
||||
(objectName, uploadId) => {
|
||||
assert.deepStrictEqual(ds, []);
|
||||
listAndAbort(uploadId, null, objectName, done);
|
||||
});
|
||||
});
|
||||
|
||||
it('should put a part to AWS based on bucket location with bucketMatch '
|
||||
+ 'set to true', done => {
|
||||
putPart(null, awsLocation, 'localhost',
|
||||
(objectName, uploadId) => {
|
||||
assert.deepStrictEqual(ds, []);
|
||||
listAndAbort(uploadId, null, objectName, done);
|
||||
});
|
||||
});
|
||||
|
||||
it('should put a part to AWS based on bucket location with bucketMatch '
|
||||
+ 'set to false', done => {
|
||||
putPart(null, awsLocationMismatch, 'localhost',
|
||||
(objectName, uploadId) => {
|
||||
assert.deepStrictEqual(ds, []);
|
||||
listAndAbort(uploadId, null, `${bucketName}/${objectName}`, done);
|
||||
});
|
||||
});
|
||||
|
||||
it('should put a part to file based on request endpoint', done => {
|
||||
putPart(null, null, 'localhost', () => {
|
||||
assert.deepStrictEqual(ds, []);
|
||||
done();
|
||||
});
|
||||
});
|
||||
|
||||
it('should store a part even if the MPU was initiated on legacy version',
|
||||
done => {
|
||||
putPart('scality-internal-mem', null, 'localhost',
|
||||
(objectKey, uploadId) => {
|
||||
const mputOverviewKey = _getOverviewKey(objectKey, uploadId);
|
||||
mdWrapper.getObjectMD(mpuBucket, mputOverviewKey, {}, log,
|
||||
(err, res) => {
|
||||
// remove location constraint to mimic legacy behvior
|
||||
// eslint-disable-next-line no-param-reassign
|
||||
res.controllingLocationConstraint = undefined;
|
||||
const md5Hash = crypto.createHash('md5');
|
||||
const bufferBody = Buffer.from(body1);
|
||||
const calculatedHash = md5Hash.update(bufferBody).digest('hex');
|
||||
const partRequest = new DummyRequest({
|
||||
bucketName,
|
||||
namespace,
|
||||
objectKey,
|
||||
headers: { host: `${bucketName}.s3.amazonaws.com` },
|
||||
url: `/${objectKey}?partNumber=1&uploadId=${uploadId}`,
|
||||
query: { partNumber: '1', uploadId },
|
||||
calculatedHash,
|
||||
}, body1);
|
||||
objectPutPart(authInfo, partRequest, undefined, log, err => {
|
||||
assert.strictEqual(err, null);
|
||||
const keysInMPUkeyMap = [];
|
||||
metadata.keyMaps.get(mpuBucket).forEach((val, key) => {
|
||||
keysInMPUkeyMap.push(key);
|
||||
});
|
||||
const sortedKeyMap = keysInMPUkeyMap.sort(a => {
|
||||
if (a.slice(0, 8) === 'overview') {
|
||||
return -1;
|
||||
}
|
||||
return 0;
|
||||
});
|
||||
const partKey = sortedKeyMap[1];
|
||||
const partETag = metadata.keyMaps.get(mpuBucket)
|
||||
.get(partKey)['content-md5'];
|
||||
assert.strictEqual(keysInMPUkeyMap.length, 2);
|
||||
assert.strictEqual(partETag, calculatedHash);
|
||||
done();
|
||||
});
|
||||
});
|
||||
});
|
||||
});
|
||||
});
|
||||
});
|
||||
|
|
|
@ -16,7 +16,7 @@ class DummyRequest extends http.IncomingMessage {
|
|||
this.parsedContentLength = 0;
|
||||
}
|
||||
}
|
||||
|
||||
this.actionImplicitDenies = false;
|
||||
if (Array.isArray(msg)) {
|
||||
msg.forEach(part => {
|
||||
this.push(part);
|
||||
|
|
|
@ -24,6 +24,7 @@ const bucketPutReq = {
|
|||
bucketName,
|
||||
headers: { host: `${bucketName}.s3.amazonaws.com` },
|
||||
url: '/',
|
||||
actionImplicitDenies: false,
|
||||
};
|
||||
|
||||
const taggingUtil = new TaggingConfigTester();
|
||||
|
|
|
@ -88,6 +88,7 @@ describe('bucketDelete API', () => {
|
|||
namespace,
|
||||
headers: {},
|
||||
url: `/${bucketName}`,
|
||||
actionImplicitDenies: false,
|
||||
};
|
||||
|
||||
const initiateRequest = {
|
||||
|
@ -96,6 +97,7 @@ describe('bucketDelete API', () => {
|
|||
objectKey: objectName,
|
||||
headers: { host: `${bucketName}.s3.amazonaws.com` },
|
||||
url: `/${objectName}?uploads`,
|
||||
actionImplicitDenies: false,
|
||||
};
|
||||
|
||||
it('should return an error if the bucket is not empty', done => {
|
||||
|
|
|
@ -19,6 +19,7 @@ const testBucketPutRequest = {
|
|||
bucketName,
|
||||
headers: { host: `${bucketName}.s3.amazonaws.com` },
|
||||
url: '/',
|
||||
actionImplicitDenies: false,
|
||||
};
|
||||
const testBucketPutCorsRequest =
|
||||
corsUtil.createBucketCorsRequest('PUT', bucketName);
|
||||
|
|
|
@ -13,6 +13,7 @@ const bucketPutRequest = {
|
|||
bucketName,
|
||||
headers: { host: `${bucketName}.s3.amazonaws.com` },
|
||||
url: '/',
|
||||
actionImplicitDenies: false,
|
||||
};
|
||||
|
||||
describe('bucketDeleteEncryption API', () => {
|
||||
|
|
|
@ -19,6 +19,7 @@ function _makeRequest(includeXml) {
|
|||
bucketName,
|
||||
headers: { host: `${bucketName}.s3.amazonaws.com` },
|
||||
url: '/',
|
||||
actionImplicitDenies: false,
|
||||
};
|
||||
if (includeXml) {
|
||||
request.post = '<LifecycleConfiguration ' +
|
||||
|
|
|
@ -19,6 +19,7 @@ function _makeRequest(includePolicy) {
|
|||
bucketName,
|
||||
headers: { host: `${bucketName}.s3.amazonaws.com` },
|
||||
url: '/',
|
||||
actionImplicitDenies: false,
|
||||
};
|
||||
if (includePolicy) {
|
||||
const examplePolicy = {
|
||||
|
|
|
@ -20,6 +20,7 @@ const testBucketPutRequest = {
|
|||
bucketName,
|
||||
headers: { host: `${bucketName}.s3.amazonaws.com` },
|
||||
url: '/',
|
||||
actionImplicitDenies: false,
|
||||
};
|
||||
const testBucketDeleteWebsiteRequest = {
|
||||
bucketName,
|
||||
|
@ -28,6 +29,7 @@ const testBucketDeleteWebsiteRequest = {
|
|||
},
|
||||
url: '/?website',
|
||||
query: { website: '' },
|
||||
actionImplicitDenies: false,
|
||||
};
|
||||
const testBucketPutWebsiteRequest = Object.assign({ post: config.getXml() },
|
||||
testBucketDeleteWebsiteRequest);
|
||||
|
|
|
@ -63,6 +63,7 @@ const baseGetRequest = {
|
|||
bucketName,
|
||||
namespace,
|
||||
headers: { host: '/' },
|
||||
actionImplicitDenies: false,
|
||||
};
|
||||
const baseUrl = `/${bucketName}`;
|
||||
|
||||
|
|
|
@ -25,6 +25,7 @@ describe('bucketGetACL API', () => {
|
|||
namespace,
|
||||
headers: { host: `${bucketName}.s3.amazonaws.com` },
|
||||
url: '/',
|
||||
actionImplicitDenies: false,
|
||||
};
|
||||
const testGetACLRequest = {
|
||||
bucketName,
|
||||
|
@ -32,6 +33,7 @@ describe('bucketGetACL API', () => {
|
|||
headers: { host: `${bucketName}.s3.amazonaws.com` },
|
||||
url: '/?acl',
|
||||
query: { acl: '' },
|
||||
actionImplicitDenies: false,
|
||||
};
|
||||
|
||||
it('should get a canned private ACL', done => {
|
||||
|
@ -44,6 +46,7 @@ describe('bucketGetACL API', () => {
|
|||
},
|
||||
url: '/?acl',
|
||||
query: { acl: '' },
|
||||
actionImplicitDenies: false,
|
||||
};
|
||||
|
||||
async.waterfall([
|
||||
|
@ -76,6 +79,7 @@ describe('bucketGetACL API', () => {
|
|||
},
|
||||
url: '/?acl',
|
||||
query: { acl: '' },
|
||||
actionImplicitDenies: false,
|
||||
};
|
||||
|
||||
async.waterfall([
|
||||
|
@ -119,6 +123,7 @@ describe('bucketGetACL API', () => {
|
|||
},
|
||||
url: '/?acl',
|
||||
query: { acl: '' },
|
||||
actionImplicitDenies: false,
|
||||
};
|
||||
|
||||
async.waterfall([
|
||||
|
@ -156,6 +161,7 @@ describe('bucketGetACL API', () => {
|
|||
},
|
||||
url: '/?acl',
|
||||
query: { acl: '' },
|
||||
actionImplicitDenies: false,
|
||||
};
|
||||
|
||||
async.waterfall([
|
||||
|
@ -194,6 +200,7 @@ describe('bucketGetACL API', () => {
|
|||
},
|
||||
url: '/?acl',
|
||||
query: { acl: '' },
|
||||
actionImplicitDenies: false,
|
||||
};
|
||||
|
||||
async.waterfall([
|
||||
|
@ -248,6 +255,7 @@ describe('bucketGetACL API', () => {
|
|||
},
|
||||
url: '/?acl',
|
||||
query: { acl: '' },
|
||||
actionImplicitDenies: false,
|
||||
};
|
||||
const canonicalIDforSample1 =
|
||||
'79a59df900b949e55d96a1e698fbacedfd6e09d98eacf8f8d5218e7cd47ef2be';
|
||||
|
@ -338,6 +346,7 @@ describe('bucketGetACL API', () => {
|
|||
},
|
||||
url: '/?acl',
|
||||
query: { acl: '' },
|
||||
actionImplicitDenies: false,
|
||||
};
|
||||
|
||||
async.waterfall([
|
||||
|
@ -377,6 +386,7 @@ describe('bucketGetACL API', () => {
|
|||
},
|
||||
url: '/?acl',
|
||||
query: { acl: '' },
|
||||
actionImplicitDenies: false,
|
||||
};
|
||||
|
||||
async.waterfall([
|
||||
|
|
|
@ -16,6 +16,7 @@ const testBucketPutRequest = {
|
|||
bucketName,
|
||||
headers: { host: `${bucketName}.s3.amazonaws.com` },
|
||||
url: '/',
|
||||
actionImplicitDenies: false,
|
||||
};
|
||||
|
||||
function _makeCorsRequest(xml) {
|
||||
|
@ -26,6 +27,7 @@ function _makeCorsRequest(xml) {
|
|||
},
|
||||
url: '/?cors',
|
||||
query: { cors: '' },
|
||||
actionImplicitDenies: false,
|
||||
};
|
||||
|
||||
if (xml) {
|
||||
|
|
|
@ -17,6 +17,7 @@ const testBucketPutRequest = {
|
|||
bucketName,
|
||||
headers: { host: `${bucketName}.s3.amazonaws.com` },
|
||||
url: '/',
|
||||
actionImplicitDenies: false,
|
||||
};
|
||||
|
||||
describe('getBucketLifecycle API', () => {
|
||||
|
|
|
@ -16,6 +16,7 @@ const testBucketPutRequest = {
|
|||
bucketName,
|
||||
headers: { host: `${bucketName}.s3.amazonaws.com` },
|
||||
url: '/',
|
||||
actionImplicitDenies: false,
|
||||
};
|
||||
|
||||
const testGetLocationRequest = {
|
||||
|
@ -25,6 +26,7 @@ const testGetLocationRequest = {
|
|||
},
|
||||
url: '/?location',
|
||||
query: { location: '' },
|
||||
actionImplicitDenies: false,
|
||||
};
|
||||
|
||||
const locationConstraints = config.locationConstraints;
|
||||
|
|
|
@ -15,6 +15,7 @@ const testBucketPutRequest = {
|
|||
bucketName,
|
||||
headers: { host: `${bucketName}.s3.amazonaws.com` },
|
||||
url: '/',
|
||||
actionImplicitDenies: false,
|
||||
};
|
||||
|
||||
function getNotificationRequest(bucketName, xml) {
|
||||
|
@ -23,6 +24,7 @@ function getNotificationRequest(bucketName, xml) {
|
|||
headers: {
|
||||
host: `${bucketName}.s3.amazonaws.com`,
|
||||
},
|
||||
actionImplicitDenies: false,
|
||||
};
|
||||
if (xml) {
|
||||
request.post = xml;
|
||||
|
|
|
@ -14,6 +14,7 @@ const bucketPutReq = {
|
|||
host: `${bucketName}.s3.amazonaws.com`,
|
||||
},
|
||||
url: '/',
|
||||
actionImplicitDenies: false,
|
||||
};
|
||||
|
||||
const testBucketPutReqWithObjLock = {
|
||||
|
@ -23,6 +24,7 @@ const testBucketPutReqWithObjLock = {
|
|||
'x-amz-bucket-object-lock-enabled': 'True',
|
||||
},
|
||||
url: '/',
|
||||
actionImplicitDenies: false,
|
||||
};
|
||||
|
||||
function getObjectLockConfigRequest(bucketName, xml) {
|
||||
|
@ -33,6 +35,7 @@ function getObjectLockConfigRequest(bucketName, xml) {
|
|||
'x-amz-bucket-object-lock-enabled': 'true',
|
||||
},
|
||||
url: '/?object-lock',
|
||||
actionImplicitDenies: false,
|
||||
};
|
||||
if (xml) {
|
||||
request.post = xml;
|
||||
|
|
|
@ -16,6 +16,7 @@ const testBasicRequest = {
|
|||
bucketName,
|
||||
headers: { host: `${bucketName}.s3.amazonaws.com` },
|
||||
url: '/',
|
||||
actionImplicitDenies: false,
|
||||
};
|
||||
|
||||
const expectedBucketPolicy = {
|
||||
|
@ -34,6 +35,7 @@ const testPutPolicyRequest = {
|
|||
bucketName,
|
||||
headers: { host: `${bucketName}.s3.amazonaws.com` },
|
||||
post: JSON.stringify(expectedBucketPolicy),
|
||||
actionImplicitDenies: false,
|
||||
};
|
||||
|
||||
describe('getBucketPolicy API', () => {
|
||||
|
|
|
@ -15,6 +15,7 @@ const testBucketPutRequest = {
|
|||
bucketName,
|
||||
headers: { host: `${bucketName}.s3.amazonaws.com` },
|
||||
url: '/',
|
||||
actionImplicitDenies: false,
|
||||
};
|
||||
|
||||
function _makeWebsiteRequest(xml) {
|
||||
|
@ -25,6 +26,7 @@ function _makeWebsiteRequest(xml) {
|
|||
},
|
||||
url: '/?website',
|
||||
query: { website: '' },
|
||||
actionImplicitDenies: false,
|
||||
};
|
||||
|
||||
if (xml) {
|
||||
|
|
|
@ -244,7 +244,7 @@ describe('bucket policy authorization', () => {
describe('isBucketAuthorized with no policy set', () => {
it('should allow access to bucket owner', done => {
const allowed = isBucketAuthorized(bucket, 'bucketPut',
bucketOwnerCanonicalId, null, log);
bucketOwnerCanonicalId, null, false, log);
assert.equal(allowed, true);
done();
});
@ -252,7 +252,7 @@ describe('bucket policy authorization', () => {
it('should deny access to non-bucket owner',
done => {
const allowed = isBucketAuthorized(bucket, 'bucketPut',
altAcctCanonicalId, null, log);
altAcctCanonicalId, null, false, log);
assert.equal(allowed, false);
done();
});
@ -268,7 +268,7 @@ describe('bucket policy authorization', () => {
it('should allow access to non-bucket owner if principal is set to "*"',
done => {
const allowed = isBucketAuthorized(bucket, bucAction,
altAcctCanonicalId, null, log);
altAcctCanonicalId, null, log, null, false);
assert.equal(allowed, true);
done();
});
@ -276,7 +276,7 @@ describe('bucket policy authorization', () => {
it('should allow access to public user if principal is set to "*"',
done => {
const allowed = isBucketAuthorized(bucket, bucAction,
constants.publicId, null, log);
constants.publicId, null, log, null, false);
assert.equal(allowed, true);
done();
});
@ -287,7 +287,7 @@ describe('bucket policy authorization', () => {
newPolicy.Statement[0][t.keyToChange] = t.bucketValue;
bucket.setBucketPolicy(newPolicy);
const allowed = isBucketAuthorized(bucket, bucAction,
t.bucketId, t.bucketAuthInfo, log);
t.bucketId, t.bucketAuthInfo, log, null, false);
assert.equal(allowed, t.expected);
done();
});
@ -304,7 +304,7 @@ describe('bucket policy authorization', () => {
};
bucket.setBucketPolicy(newPolicy);
const allowed = isBucketAuthorized(bucket, bucAction,
altAcctCanonicalId, null, log);
altAcctCanonicalId, null, log, null, false);
assert.equal(allowed, false);
done();
});
@ -312,7 +312,7 @@ describe('bucket policy authorization', () => {
it('should deny access to non-bucket owner with an unsupported action type',
done => {
const allowed = isBucketAuthorized(bucket, 'unsupportedAction',
altAcctCanonicalId, null, log);
altAcctCanonicalId, null, log, null, false);
assert.equal(allowed, false);
done();
});
@ -325,7 +325,7 @@ describe('bucket policy authorization', () => {

it('should allow access to object owner', done => {
const allowed = isObjAuthorized(bucket, object, objAction,
objectOwnerCanonicalId, null, log);
objectOwnerCanonicalId, null, log, null, false);
assert.equal(allowed, true);
done();
});
@ -333,7 +333,7 @@ describe('bucket policy authorization', () => {
it('should deny access to non-object owner',
done => {
const allowed = isObjAuthorized(bucket, object, objAction,
altAcctCanonicalId, null, log);
altAcctCanonicalId, null, log, null, false);
assert.equal(allowed, false);
done();
});
@ -352,7 +352,7 @@ describe('bucket policy authorization', () => {
it('should allow access to non-object owner if principal is set to "*"',
done => {
const allowed = isObjAuthorized(bucket, object, objAction,
altAcctCanonicalId, null, log);
altAcctCanonicalId, null, log, null, false);
assert.equal(allowed, true);
done();
});
@ -360,7 +360,7 @@ describe('bucket policy authorization', () => {
it('should allow access to public user if principal is set to "*"',
done => {
const allowed = isObjAuthorized(bucket, object, objAction,
constants.publicId, null, log);
constants.publicId, null, log, null, false);
assert.equal(allowed, true);
done();
});
@ -371,7 +371,7 @@ describe('bucket policy authorization', () => {
newPolicy.Statement[0][t.keyToChange] = t.objectValue;
bucket.setBucketPolicy(newPolicy);
const allowed = isObjAuthorized(bucket, object, objAction,
t.objectId, t.objectAuthInfo, log);
t.objectId, t.objectAuthInfo, log, null, false);
assert.equal(allowed, t.expected);
done();
});
@ -383,7 +383,7 @@ describe('bucket policy authorization', () => {
newPolicy.Statement[0].Action = ['s3:GetObject'];
bucket.setBucketPolicy(newPolicy);
const allowed = isObjAuthorized(bucket, object, 'objectHead',
altAcctCanonicalId, altAcctAuthInfo, log);
altAcctCanonicalId, altAcctAuthInfo, log, null, false);
assert.equal(allowed, true);
done();
});
@ -393,7 +393,7 @@ describe('bucket policy authorization', () => {
newPolicy.Statement[0].Action = ['s3:PutObject'];
bucket.setBucketPolicy(newPolicy);
const allowed = isObjAuthorized(bucket, object, 'objectHead',
altAcctCanonicalId, altAcctAuthInfo, log);
altAcctCanonicalId, altAcctAuthInfo, log, null, false);
assert.equal(allowed, false);
done();
});
@ -408,7 +408,7 @@ describe('bucket policy authorization', () => {
};
bucket.setBucketPolicy(newPolicy);
const allowed = isObjAuthorized(bucket, object, objAction,
altAcctCanonicalId, null, log);
altAcctCanonicalId, null, log, null, false);
assert.equal(allowed, false);
done();
});
@ -416,7 +416,7 @@ describe('bucket policy authorization', () => {
it('should deny access to non-object owner with an unsupported action type',
done => {
const allowed = isObjAuthorized(bucket, object, 'unsupportedAction',
altAcctCanonicalId, null, log);
altAcctCanonicalId, null, log, null, false);
assert.equal(allowed, false);
done();
});
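The hunks above thread an implicit-deny indicator through `isBucketAuthorized` and `isObjAuthorized`. The sketch below mirrors those call shapes in one place; the require path, the fixtures (`bucket`, `object`, `log`, canonical IDs), and the reading of the trailing `false` as "the action was not implicitly denied upstream" are assumptions inferred from the diff rather than confirmed API documentation.

```js
// Sketch only: reproduces the updated call shapes from the hunks above.
// The module path, action names, and fixture objects are assumptions.
const assert = require('assert');
const { isBucketAuthorized, isObjAuthorized } =
    require('../../../lib/api/apiUtils/authorization/permissionChecks');

function assertOwnerAccess(bucket, object, ownerCanonicalId, log) {
    // Bucket-level check: the added boolean (false = not implicitly denied)
    // sits before the logger for this call form, as in the diff.
    const bucketAllowed = isBucketAuthorized(bucket, 'bucketPut',
        ownerCanonicalId, null, false, log);
    assert.equal(bucketAllowed, true);

    // Object-level check: the flag travels as a trailing argument here.
    const objectAllowed = isObjAuthorized(bucket, object, 'objectGet',
        ownerCanonicalId, null, log, null, false);
    assert.equal(objectAllowed, true);
}

module.exports = { assertOwnerAccess };
```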
@ -18,11 +18,10 @@ const testBucketPutRequest = {
namespace,
headers: { host: `${bucketName}.s3.amazonaws.com` },
url: '/',
actionImplicitDenies: false,
};
const canonicalIDforSample1 =
'79a59df900b949e55d96a1e698fbacedfd6e09d98eacf8f8d5218e7cd47ef2be';
const canonicalIDforSample2 =
'79a59df900b949e55d96a1e698fbacedfd6e09d98eacf8f8d5218e7cd47ef2bf';
const canonicalIDforSample1 = '79a59df900b949e55d96a1e698fbacedfd6e09d98eacf8f8d5218e7cd47ef2be';
const canonicalIDforSample2 = '79a59df900b949e55d96a1e698fbacedfd6e09d98eacf8f8d5218e7cd47ef2bf';

const invalidIds = {
'too short': 'id="invalid_id"',
@ -42,11 +41,10 @@ describe('putBucketACL API', () => {
afterEach(() => cleanup());

it('should parse a grantheader', () => {
const grantRead =
`uri=${constants.logId}, ` +
'emailAddress="test@testing.com", ' +
'emailAddress="test2@testly.com", ' +
'id="sdfsdfsfwwiieohefs"';
const grantRead = `uri=${constants.logId}, `
+ 'emailAddress="test@testing.com", '
+ 'emailAddress="test2@testly.com", '
+ 'id="sdfsdfsfwwiieohefs"';
const grantReadHeader = aclUtils.parseGrant(grantRead, 'read');
const firstIdentifier = grantReadHeader[0].identifier;
assert.strictEqual(firstIdentifier, constants.logId);
@ -58,7 +56,7 @@ describe('putBucketACL API', () => {
assert.strictEqual(fourthIdentifier, 'sdfsdfsfwwiieohefs');
const fourthType = grantReadHeader[3].userIDType;
assert.strictEqual(fourthType, 'id');
const grantType = grantReadHeader[3].grantType;
const { grantType } = grantReadHeader[3];
assert.strictEqual(grantType, 'read');
});

@ -72,6 +70,7 @@ describe('putBucketACL API', () => {
},
url: '/?acl',
query: { acl: '' },
actionImplicitDenies: false,
};

bucketPutACL(authInfo, testACLRequest, log, err => {
@ -90,6 +89,7 @@ describe('putBucketACL API', () => {
},
url: '/?acl',
query: { acl: '' },
actionImplicitDenies: false,
};
bucketPutACL(authInfo, testACLRequest, log, err => {
assert.strictEqual(err, undefined);
@ -111,6 +111,7 @@ describe('putBucketACL API', () => {
},
url: '/?acl',
query: { acl: '' },
actionImplicitDenies: false,
};
const testACLRequest2 = {
bucketName,
@ -121,6 +122,7 @@ describe('putBucketACL API', () => {
},
url: '/?acl',
query: { acl: '' },
actionImplicitDenies: false,
};
bucketPutACL(authInfo, testACLRequest, log, err => {
assert.strictEqual(err, undefined);
@ -130,7 +132,7 @@ describe('putBucketACL API', () => {
|
|||
assert.strictEqual(err, undefined);
|
||||
metadata.getBucket(bucketName, log, (err, md) => {
|
||||
assert.strictEqual(md.getAcl().Canned,
|
||||
'authenticated-read');
|
||||
'authenticated-read');
|
||||
done();
|
||||
});
|
||||
});
|
||||
|
@ -138,8 +140,8 @@ describe('putBucketACL API', () => {
|
|||
});
|
||||
});
|
||||
|
||||
it('should set a canned private ACL ' +
|
||||
'followed by a log-delivery-write ACL', done => {
|
||||
it('should set a canned private ACL '
|
||||
+ 'followed by a log-delivery-write ACL', done => {
|
||||
const testACLRequest = {
|
||||
bucketName,
|
||||
namespace,
|
||||
|
@ -149,6 +151,7 @@ describe('putBucketACL API', () => {
|
|||
},
|
||||
url: '/?acl',
|
||||
query: { acl: '' },
|
||||
actionImplicitDenies: false,
|
||||
};
|
||||
const testACLRequest2 = {
|
||||
bucketName,
|
||||
|
@ -159,6 +162,7 @@ describe('putBucketACL API', () => {
|
|||
},
|
||||
url: '/?acl',
|
||||
query: { acl: '' },
|
||||
actionImplicitDenies: false,
|
||||
};
|
||||
|
||||
bucketPutACL(authInfo, testACLRequest, log, err => {
|
||||
|
@ -169,7 +173,7 @@ describe('putBucketACL API', () => {
|
|||
assert.strictEqual(err, undefined);
|
||||
metadata.getBucket(bucketName, log, (err, md) => {
|
||||
assert.strictEqual(md.getAcl().Canned,
|
||||
'log-delivery-write');
|
||||
'log-delivery-write');
|
||||
done();
|
||||
});
|
||||
});
|
||||
|
@ -184,19 +188,20 @@ describe('putBucketACL API', () => {
|
|||
headers: {
|
||||
'host': `${bucketName}.s3.amazonaws.com`,
|
||||
'x-amz-grant-full-control':
|
||||
'emailaddress="sampleaccount1@sampling.com"' +
|
||||
',emailaddress="sampleaccount2@sampling.com"',
|
||||
'emailaddress="sampleaccount1@sampling.com"'
|
||||
+ ',emailaddress="sampleaccount2@sampling.com"',
|
||||
'x-amz-grant-read': `uri=${constants.logId}`,
|
||||
'x-amz-grant-write': `uri=${constants.publicId}`,
|
||||
'x-amz-grant-read-acp':
|
||||
'id=79a59df900b949e55d96a1e698fbacedfd6e09d98eac' +
|
||||
'f8f8d5218e7cd47ef2be',
|
||||
'id=79a59df900b949e55d96a1e698fbacedfd6e09d98eac'
|
||||
+ 'f8f8d5218e7cd47ef2be',
|
||||
'x-amz-grant-write-acp':
|
||||
'id=79a59df900b949e55d96a1e698fbacedfd6e09d98eac' +
|
||||
'f8f8d5218e7cd47ef2bf',
|
||||
'id=79a59df900b949e55d96a1e698fbacedfd6e09d98eac'
|
||||
+ 'f8f8d5218e7cd47ef2bf',
|
||||
},
|
||||
url: '/?acl',
|
||||
query: { acl: '' },
|
||||
actionImplicitDenies: false,
|
||||
};
|
||||
bucketPutACL(authInfo, testACLRequest, log, err => {
|
||||
assert.strictEqual(err, undefined);
|
||||
|
@ -223,21 +228,22 @@ describe('putBucketACL API', () => {
|
|||
headers: {
|
||||
'host': `${bucketName}.s3.amazonaws.com`,
|
||||
'x-amz-grant-full-control':
|
||||
'emailaddress="sampleaccount1@sampling.com"' +
|
||||
',emailaddress="sampleaccount2@sampling.com"',
|
||||
'emailaddress="sampleaccount1@sampling.com"'
|
||||
+ ',emailaddress="sampleaccount2@sampling.com"',
|
||||
'x-amz-grant-read':
|
||||
'emailaddress="sampleaccount1@sampling.com"',
|
||||
'x-amz-grant-write':
|
||||
'emailaddress="sampleaccount1@sampling.com"',
|
||||
'x-amz-grant-read-acp':
|
||||
'id=79a59df900b949e55d96a1e698fbacedfd6e09d98eac' +
|
||||
'f8f8d5218e7cd47ef2be',
|
||||
'id=79a59df900b949e55d96a1e698fbacedfd6e09d98eac'
|
||||
+ 'f8f8d5218e7cd47ef2be',
|
||||
'x-amz-grant-write-acp':
|
||||
'id=79a59df900b949e55d96a1e698fbacedfd6e09d98eac' +
|
||||
'f8f8d5218e7cd47ef2bf',
|
||||
'id=79a59df900b949e55d96a1e698fbacedfd6e09d98eac'
|
||||
+ 'f8f8d5218e7cd47ef2bf',
|
||||
},
|
||||
url: '/?acl',
|
||||
query: { acl: '' },
|
||||
actionImplicitDenies: false,
|
||||
};
|
||||
bucketPutACL(authInfo, testACLRequest, log, err => {
|
||||
assert.strictEqual(err, undefined);
|
||||
|
@ -260,8 +266,8 @@ describe('putBucketACL API', () => {
|
|||
});
|
||||
|
||||
Object.keys(invalidIds).forEach(idType => {
|
||||
it('should return an error if grantee canonical ID provided in ACL ' +
|
||||
`request invalid because ${idType}`, done => {
|
||||
it('should return an error if grantee canonical ID provided in ACL '
|
||||
+ `request invalid because ${idType}`, done => {
|
||||
const testACLRequest = {
|
||||
bucketName,
|
||||
namespace,
|
||||
|
@ -271,6 +277,7 @@ describe('putBucketACL API', () => {
|
|||
},
|
||||
url: '/?acl',
|
||||
query: { acl: '' },
|
||||
actionImplicitDenies: false,
|
||||
};
|
||||
return bucketPutACL(authInfo, testACLRequest, log, err => {
|
||||
assert.deepStrictEqual(err, errors.InvalidArgument);
|
||||
|
@ -279,19 +286,20 @@ describe('putBucketACL API', () => {
|
|||
});
|
||||
});
|
||||
|
||||
it('should return an error if invalid email ' +
|
||||
'provided in ACL header request', done => {
|
||||
it('should return an error if invalid email '
|
||||
+ 'provided in ACL header request', done => {
|
||||
const testACLRequest = {
|
||||
bucketName,
|
||||
namespace,
|
||||
headers: {
|
||||
'host': `${bucketName}.s3.amazonaws.com`,
|
||||
'x-amz-grant-full-control':
|
||||
'emailaddress="sampleaccount1@sampling.com"' +
|
||||
',emailaddress="nonexistentEmail@sampling.com"',
|
||||
'emailaddress="sampleaccount1@sampling.com"'
|
||||
+ ',emailaddress="nonexistentEmail@sampling.com"',
|
||||
},
|
||||
url: '/?acl',
|
||||
query: { acl: '' },
|
||||
actionImplicitDenies: false,
|
||||
};
|
||||
|
||||
bucketPutACL(authInfo, testACLRequest, log, err => {
|
||||
|
@ -305,52 +313,53 @@ describe('putBucketACL API', () => {
|
|||
bucketName,
|
||||
namespace,
|
||||
headers: { host: `${bucketName}.s3.amazonaws.com` },
|
||||
post: '<AccessControlPolicy xmlns=' +
|
||||
'"http://s3.amazonaws.com/doc/2006-03-01/">' +
|
||||
'<Owner>' +
|
||||
'<ID>79a59df900b949e55d96a1e698fbaced' +
|
||||
'fd6e09d98eacf8f8d5218e7cd47ef2be</ID>' +
|
||||
'<DisplayName>OwnerDisplayName</DisplayName>' +
|
||||
'</Owner>' +
|
||||
'<AccessControlList>' +
|
||||
'<Grant>' +
|
||||
'<Grantee xsi:type="CanonicalUser">' +
|
||||
'<ID>79a59df900b949e55d96a1e698fbaced' +
|
||||
'fd6e09d98eacf8f8d5218e7cd47ef2be</ID>' +
|
||||
'<DisplayName>OwnerDisplayName</DisplayName>' +
|
||||
'</Grantee>' +
|
||||
'<Permission>FULL_CONTROL</Permission>' +
|
||||
'</Grant>' +
|
||||
'<Grant>' +
|
||||
'<Grantee xsi:type="Group">' +
|
||||
`<URI>${constants.publicId}</URI>` +
|
||||
'</Grantee>' +
|
||||
'<Permission>READ</Permission>' +
|
||||
'</Grant>' +
|
||||
'<Grant>' +
|
||||
'<Grantee xsi:type="Group">' +
|
||||
`<URI>${constants.logId}</URI>` +
|
||||
'</Grantee>' +
|
||||
'<Permission>WRITE</Permission>' +
|
||||
'</Grant>' +
|
||||
'<Grant>' +
|
||||
'<Grantee xsi:type="AmazonCustomerByEmail">' +
|
||||
'<EmailAddress>sampleaccount1@sampling.com' +
|
||||
'</EmailAddress>' +
|
||||
'</Grantee>' +
|
||||
'<Permission>WRITE_ACP</Permission>' +
|
||||
'</Grant>' +
|
||||
'<Grant>' +
|
||||
'<Grantee xsi:type="CanonicalUser">' +
|
||||
'<ID>79a59df900b949e55d96a1e698fbacedfd' +
|
||||
'6e09d98eacf8f8d5218e7cd47ef2bf</ID>' +
|
||||
'</Grantee>' +
|
||||
'<Permission>READ_ACP</Permission>' +
|
||||
'</Grant>' +
|
||||
'</AccessControlList>' +
|
||||
'</AccessControlPolicy>',
|
||||
post: '<AccessControlPolicy xmlns='
|
||||
+ '"http://s3.amazonaws.com/doc/2006-03-01/">'
|
||||
+ '<Owner>'
|
||||
+ '<ID>79a59df900b949e55d96a1e698fbaced'
|
||||
+ 'fd6e09d98eacf8f8d5218e7cd47ef2be</ID>'
|
||||
+ '<DisplayName>OwnerDisplayName</DisplayName>'
|
||||
+ '</Owner>'
|
||||
+ '<AccessControlList>'
|
||||
+ '<Grant>'
|
||||
+ '<Grantee xsi:type="CanonicalUser">'
|
||||
+ '<ID>79a59df900b949e55d96a1e698fbaced'
|
||||
+ 'fd6e09d98eacf8f8d5218e7cd47ef2be</ID>'
|
||||
+ '<DisplayName>OwnerDisplayName</DisplayName>'
|
||||
+ '</Grantee>'
|
||||
+ '<Permission>FULL_CONTROL</Permission>'
|
||||
+ '</Grant>'
|
||||
+ '<Grant>'
|
||||
+ '<Grantee xsi:type="Group">'
|
||||
+ `<URI>${constants.publicId}</URI>`
|
||||
+ '</Grantee>'
|
||||
+ '<Permission>READ</Permission>'
|
||||
+ '</Grant>'
|
||||
+ '<Grant>'
|
||||
+ '<Grantee xsi:type="Group">'
|
||||
+ `<URI>${constants.logId}</URI>`
|
||||
+ '</Grantee>'
|
||||
+ '<Permission>WRITE</Permission>'
|
||||
+ '</Grant>'
|
||||
+ '<Grant>'
|
||||
+ '<Grantee xsi:type="AmazonCustomerByEmail">'
|
||||
+ '<EmailAddress>sampleaccount1@sampling.com'
|
||||
+ '</EmailAddress>'
|
||||
+ '</Grantee>'
|
||||
+ '<Permission>WRITE_ACP</Permission>'
|
||||
+ '</Grant>'
|
||||
+ '<Grant>'
|
||||
+ '<Grantee xsi:type="CanonicalUser">'
|
||||
+ '<ID>79a59df900b949e55d96a1e698fbacedfd'
|
||||
+ '6e09d98eacf8f8d5218e7cd47ef2bf</ID>'
|
||||
+ '</Grantee>'
|
||||
+ '<Permission>READ_ACP</Permission>'
|
||||
+ '</Grant>'
|
||||
+ '</AccessControlList>'
|
||||
+ '</AccessControlPolicy>',
|
||||
url: '/?acl',
|
||||
query: { acl: '' },
|
||||
actionImplicitDenies: false,
|
||||
};
|
||||
|
||||
bucketPutACL(authInfo, testACLRequest, log, err => {
|
||||
|
@ -362,7 +371,7 @@ describe('putBucketACL API', () => {
|
|||
assert.strictEqual(md.getAcl().READ[0], constants.publicId);
|
||||
assert.strictEqual(md.getAcl().WRITE[0], constants.logId);
|
||||
assert.strictEqual(md.getAcl().WRITE_ACP[0],
|
||||
canonicalIDforSample1);
|
||||
canonicalIDforSample1);
|
||||
assert.strictEqual(md.getAcl().READ_ACP[0],
|
||||
canonicalIDforSample2);
|
||||
done();
|
||||
|
@ -375,17 +384,18 @@ describe('putBucketACL API', () => {
|
|||
bucketName,
|
||||
namespace,
|
||||
headers: { host: `${bucketName}.s3.amazonaws.com` },
|
||||
post: '<AccessControlPolicy xmlns=' +
|
||||
'"http://s3.amazonaws.com/doc/2006-03-01/">' +
|
||||
'<Owner>' +
|
||||
'<ID>79a59df900b949e55d96a1e698fbaced' +
|
||||
'fd6e09d98eacf8f8d5218e7cd47ef2be</ID>' +
|
||||
'<DisplayName>OwnerDisplayName</DisplayName>' +
|
||||
'</Owner>' +
|
||||
'<AccessControlList></AccessControlList>' +
|
||||
'</AccessControlPolicy>',
|
||||
post: '<AccessControlPolicy xmlns='
|
||||
+ '"http://s3.amazonaws.com/doc/2006-03-01/">'
|
||||
+ '<Owner>'
|
||||
+ '<ID>79a59df900b949e55d96a1e698fbaced'
|
||||
+ 'fd6e09d98eacf8f8d5218e7cd47ef2be</ID>'
|
||||
+ '<DisplayName>OwnerDisplayName</DisplayName>'
|
||||
+ '</Owner>'
|
||||
+ '<AccessControlList></AccessControlList>'
|
||||
+ '</AccessControlPolicy>',
|
||||
url: '/?acl',
|
||||
query: { acl: '' },
|
||||
actionImplicitDenies: false,
|
||||
};
|
||||
|
||||
bucketPutACL(authInfo, testACLRequest, log, err => {
|
||||
|
@ -403,64 +413,66 @@ describe('putBucketACL API', () => {
|
|||
});
|
||||
|
||||
it('should not be able to set ACLs without AccessControlList section',
|
||||
done => {
|
||||
const testACLRequest = {
|
||||
bucketName,
|
||||
namespace,
|
||||
headers: { host: `${bucketName}.s3.amazonaws.com` },
|
||||
post: '<AccessControlPolicy xmlns=' +
|
||||
'"http://s3.amazonaws.com/doc/2006-03-01/">' +
|
||||
'<Owner>' +
|
||||
'<ID>79a59df900b949e55d96a1e698fbaced' +
|
||||
'fd6e09d98eacf8f8d5218e7cd47ef2be</ID>' +
|
||||
'<DisplayName>OwnerDisplayName</DisplayName>' +
|
||||
'</Owner>' +
|
||||
'</AccessControlPolicy>',
|
||||
url: '/?acl',
|
||||
query: { acl: '' },
|
||||
};
|
||||
done => {
|
||||
const testACLRequest = {
|
||||
bucketName,
|
||||
namespace,
|
||||
headers: { host: `${bucketName}.s3.amazonaws.com` },
|
||||
post: '<AccessControlPolicy xmlns='
|
||||
+ '"http://s3.amazonaws.com/doc/2006-03-01/">'
|
||||
+ '<Owner>'
|
||||
+ '<ID>79a59df900b949e55d96a1e698fbaced'
|
||||
+ 'fd6e09d98eacf8f8d5218e7cd47ef2be</ID>'
|
||||
+ '<DisplayName>OwnerDisplayName</DisplayName>'
|
||||
+ '</Owner>'
|
||||
+ '</AccessControlPolicy>',
|
||||
url: '/?acl',
|
||||
query: { acl: '' },
|
||||
actionImplicitDenies: false,
|
||||
};
|
||||
|
||||
bucketPutACL(authInfo, testACLRequest, log, err => {
|
||||
assert.deepStrictEqual(err, errors.MalformedACLError);
|
||||
done();
|
||||
bucketPutACL(authInfo, testACLRequest, log, err => {
|
||||
assert.deepStrictEqual(err, errors.MalformedACLError);
|
||||
done();
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
it('should return an error if multiple AccessControlList section', done => {
|
||||
const testACLRequest = {
|
||||
bucketName,
|
||||
namespace,
|
||||
headers: { host: `${bucketName}.s3.amazonaws.com` },
|
||||
post: '<AccessControlPolicy xmlns=' +
|
||||
'"http://s3.amazonaws.com/doc/2006-03-01/">' +
|
||||
'<Owner>' +
|
||||
'<ID>79a59df900b949e55d96a1e698fbaced' +
|
||||
'fd6e09d98eacf8f8d5218e7cd47ef2be</ID>' +
|
||||
'<DisplayName>OwnerDisplayName</DisplayName>' +
|
||||
'</Owner>' +
|
||||
'<AccessControlList>' +
|
||||
'<Grant>' +
|
||||
'<Grantee xsi:type="CanonicalUser">' +
|
||||
'<ID>79a59df900b949e55d96a1e698fbaced' +
|
||||
'fd6e09d98eacf8f8d5218e7cd47ef2be</ID>' +
|
||||
'<DisplayName>OwnerDisplayName</DisplayName>' +
|
||||
'</Grantee>' +
|
||||
'<Permission>FULL_CONTROL</Permission>' +
|
||||
'</Grant>' +
|
||||
'</AccessControlList>' +
|
||||
'<AccessControlList>' +
|
||||
'<Grant>' +
|
||||
'<Grantee xsi:type="CanonicalUser">' +
|
||||
'<ID>79a59df900b949e55d96a1e698fbaced' +
|
||||
'fd6e09d98eacf8f8d5218e7cd47ef2be</ID>' +
|
||||
'<DisplayName>OwnerDisplayName</DisplayName>' +
|
||||
'</Grantee>' +
|
||||
'<Permission>READ</Permission>' +
|
||||
'</Grant>' +
|
||||
'</AccessControlList>' +
|
||||
'</AccessControlPolicy>',
|
||||
post: '<AccessControlPolicy xmlns='
|
||||
+ '"http://s3.amazonaws.com/doc/2006-03-01/">'
|
||||
+ '<Owner>'
|
||||
+ '<ID>79a59df900b949e55d96a1e698fbaced'
|
||||
+ 'fd6e09d98eacf8f8d5218e7cd47ef2be</ID>'
|
||||
+ '<DisplayName>OwnerDisplayName</DisplayName>'
|
||||
+ '</Owner>'
|
||||
+ '<AccessControlList>'
|
||||
+ '<Grant>'
|
||||
+ '<Grantee xsi:type="CanonicalUser">'
|
||||
+ '<ID>79a59df900b949e55d96a1e698fbaced'
|
||||
+ 'fd6e09d98eacf8f8d5218e7cd47ef2be</ID>'
|
||||
+ '<DisplayName>OwnerDisplayName</DisplayName>'
|
||||
+ '</Grantee>'
|
||||
+ '<Permission>FULL_CONTROL</Permission>'
|
||||
+ '</Grant>'
|
||||
+ '</AccessControlList>'
|
||||
+ '<AccessControlList>'
|
||||
+ '<Grant>'
|
||||
+ '<Grantee xsi:type="CanonicalUser">'
|
||||
+ '<ID>79a59df900b949e55d96a1e698fbaced'
|
||||
+ 'fd6e09d98eacf8f8d5218e7cd47ef2be</ID>'
|
||||
+ '<DisplayName>OwnerDisplayName</DisplayName>'
|
||||
+ '</Grantee>'
|
||||
+ '<Permission>READ</Permission>'
|
||||
+ '</Grant>'
|
||||
+ '</AccessControlList>'
|
||||
+ '</AccessControlPolicy>',
|
||||
url: '/?acl',
|
||||
query: { acl: '' },
|
||||
actionImplicitDenies: false,
|
||||
};
|
||||
|
||||
bucketPutACL(authInfo, testACLRequest, log, err => {
|
||||
|
@ -469,30 +481,31 @@ describe('putBucketACL API', () => {
|
|||
});
|
||||
});
|
||||
|
||||
it('should return an error if invalid grantee user ID ' +
|
||||
'provided in ACL request body', done => {
|
||||
it('should return an error if invalid grantee user ID '
|
||||
+ 'provided in ACL request body', done => {
|
||||
const testACLRequest = {
|
||||
bucketName,
|
||||
namespace,
|
||||
headers: { host: `${bucketName}.s3.amazonaws.com` },
|
||||
post: '<AccessControlPolicy xmlns=' +
|
||||
'"http://s3.amazonaws.com/doc/2006-03-01/">' +
|
||||
'<Owner>' +
|
||||
'<ID>79a59df900b949e55d96a1e698fbaced' +
|
||||
'fd6e09d98eacf8f8d5218e7cd47ef2be</ID>' +
|
||||
'<DisplayName>OwnerDisplayName</DisplayName>' +
|
||||
'</Owner>' +
|
||||
'<AccessControlList>' +
|
||||
'<Grant>' +
|
||||
'<Grantee xsi:type="CanonicalUser">' +
|
||||
'<ID>invalid_id</ID>' +
|
||||
'</Grantee>' +
|
||||
'<Permission>READ_ACP</Permission>' +
|
||||
'</Grant>' +
|
||||
'</AccessControlList>' +
|
||||
'</AccessControlPolicy>',
|
||||
post: '<AccessControlPolicy xmlns='
|
||||
+ '"http://s3.amazonaws.com/doc/2006-03-01/">'
|
||||
+ '<Owner>'
|
||||
+ '<ID>79a59df900b949e55d96a1e698fbaced'
|
||||
+ 'fd6e09d98eacf8f8d5218e7cd47ef2be</ID>'
|
||||
+ '<DisplayName>OwnerDisplayName</DisplayName>'
|
||||
+ '</Owner>'
|
||||
+ '<AccessControlList>'
|
||||
+ '<Grant>'
|
||||
+ '<Grantee xsi:type="CanonicalUser">'
|
||||
+ '<ID>invalid_id</ID>'
|
||||
+ '</Grantee>'
|
||||
+ '<Permission>READ_ACP</Permission>'
|
||||
+ '</Grant>'
|
||||
+ '</AccessControlList>'
|
||||
+ '</AccessControlPolicy>',
|
||||
url: '/?acl',
|
||||
query: { acl: '' },
|
||||
actionImplicitDenies: false,
|
||||
};
|
||||
|
||||
return bucketPutACL(authInfo, testACLRequest, log, err => {
|
||||
|
@ -501,30 +514,31 @@ describe('putBucketACL API', () => {
|
|||
});
|
||||
});
|
||||
|
||||
it('should return an error if invalid email ' +
|
||||
'address provided in ACLs set out in request body', done => {
|
||||
it('should return an error if invalid email '
|
||||
+ 'address provided in ACLs set out in request body', done => {
|
||||
const testACLRequest = {
|
||||
bucketName,
|
||||
namespace,
|
||||
headers: { host: `${bucketName}.s3.amazonaws.com` },
|
||||
post: '<AccessControlPolicy xmlns=' +
|
||||
'"http://s3.amazonaws.com/doc/2006-03-01/">' +
|
||||
'<Owner>' +
|
||||
'<ID>79a59df900b949e55d96a1e698fbaced' +
|
||||
'fd6e09d98eacf8f8d5218e7cd47ef2be</ID>' +
|
||||
'<DisplayName>OwnerDisplayName</DisplayName>' +
|
||||
'</Owner>' +
|
||||
'<AccessControlList>' +
|
||||
'<Grant>' +
|
||||
'<Grantee xsi:type="AmazonCustomerByEmail">' +
|
||||
'<EmailAddress>xyz@amazon.com</EmailAddress>' +
|
||||
'</Grantee>' +
|
||||
'<Permission>WRITE_ACP</Permission>' +
|
||||
'</Grant>' +
|
||||
'</AccessControlList>' +
|
||||
'</AccessControlPolicy>',
|
||||
post: '<AccessControlPolicy xmlns='
|
||||
+ '"http://s3.amazonaws.com/doc/2006-03-01/">'
|
||||
+ '<Owner>'
|
||||
+ '<ID>79a59df900b949e55d96a1e698fbaced'
|
||||
+ 'fd6e09d98eacf8f8d5218e7cd47ef2be</ID>'
|
||||
+ '<DisplayName>OwnerDisplayName</DisplayName>'
|
||||
+ '</Owner>'
|
||||
+ '<AccessControlList>'
|
||||
+ '<Grant>'
|
||||
+ '<Grantee xsi:type="AmazonCustomerByEmail">'
|
||||
+ '<EmailAddress>xyz@amazon.com</EmailAddress>'
|
||||
+ '</Grantee>'
|
||||
+ '<Permission>WRITE_ACP</Permission>'
|
||||
+ '</Grant>'
|
||||
+ '</AccessControlList>'
|
||||
+ '</AccessControlPolicy>',
|
||||
url: '/?acl',
|
||||
query: { acl: '' },
|
||||
actionImplicitDenies: false,
|
||||
};
|
||||
bucketPutACL(authInfo, testACLRequest, log, err => {
|
||||
assert.deepStrictEqual(err, errors.UnresolvableGrantByEmailAddress);
|
||||
|
@ -542,24 +556,25 @@ describe('putBucketACL API', () => {
|
|||
* "Grant" which is part of the s3 xml scheme for ACLs
|
||||
* so an error should be returned
|
||||
*/
|
||||
post: '<AccessControlPolicy xmlns=' +
|
||||
'"http://s3.amazonaws.com/doc/2006-03-01/">' +
|
||||
'<Owner>' +
|
||||
'<ID>79a59df900b949e55d96a1e698fbaced' +
|
||||
'fd6e09d98eacf8f8d5218e7cd47ef2be</ID>' +
|
||||
'<DisplayName>OwnerDisplayName</DisplayName>' +
|
||||
'</Owner>' +
|
||||
'<AccessControlList>' +
|
||||
'<PowerGrant>' +
|
||||
'<Grantee xsi:type="AmazonCustomerByEmail">' +
|
||||
'<EmailAddress>xyz@amazon.com</EmailAddress>' +
|
||||
'</Grantee>' +
|
||||
'<Permission>WRITE_ACP</Permission>' +
|
||||
'</PowerGrant>' +
|
||||
'</AccessControlList>' +
|
||||
'</AccessControlPolicy>',
|
||||
post: '<AccessControlPolicy xmlns='
|
||||
+ '"http://s3.amazonaws.com/doc/2006-03-01/">'
|
||||
+ '<Owner>'
|
||||
+ '<ID>79a59df900b949e55d96a1e698fbaced'
|
||||
+ 'fd6e09d98eacf8f8d5218e7cd47ef2be</ID>'
|
||||
+ '<DisplayName>OwnerDisplayName</DisplayName>'
|
||||
+ '</Owner>'
|
||||
+ '<AccessControlList>'
|
||||
+ '<PowerGrant>'
|
||||
+ '<Grantee xsi:type="AmazonCustomerByEmail">'
|
||||
+ '<EmailAddress>xyz@amazon.com</EmailAddress>'
|
||||
+ '</Grantee>'
|
||||
+ '<Permission>WRITE_ACP</Permission>'
|
||||
+ '</PowerGrant>'
|
||||
+ '</AccessControlList>'
|
||||
+ '</AccessControlPolicy>',
|
||||
url: '/?acl',
|
||||
query: { acl: '' },
|
||||
actionImplicitDenies: false,
|
||||
};
|
||||
|
||||
bucketPutACL(authInfo, testACLRequest, log, err => {
|
||||
|
@ -579,32 +594,33 @@ describe('putBucketACL API', () => {
|
|||
* "Grant" which is part of the s3 xml scheme for ACLs
|
||||
* so an error should be returned
|
||||
*/
|
||||
post: '<AccessControlPolicy xmlns=' +
|
||||
'"http://s3.amazonaws.com/doc/2006-03-01/">' +
|
||||
'<Owner>' +
|
||||
'<ID>79a59df900b949e55d96a1e698fbaced' +
|
||||
'fd6e09d98eacf8f8d5218e7cd47ef2be</ID>' +
|
||||
'<DisplayName>OwnerDisplayName</DisplayName>' +
|
||||
'</Owner>' +
|
||||
'<AccessControlList>' +
|
||||
'<Grant>' +
|
||||
'<Grantee xsi:type="CanonicalUser">' +
|
||||
'<ID>79a59df900b949e55d96a1e698fbaced' +
|
||||
'fd6e09d98eacf8f8d5218e7cd47ef2be</ID>' +
|
||||
'<DisplayName>OwnerDisplayName</DisplayName>' +
|
||||
'</Grantee>' +
|
||||
'<Permission>FULL_CONTROL</Permission>' +
|
||||
'</Grant>' +
|
||||
'<PowerGrant>' +
|
||||
'<Grantee xsi:type="AmazonCustomerByEmail">' +
|
||||
'<EmailAddress>xyz@amazon.com</EmailAddress>' +
|
||||
'</Grantee>' +
|
||||
'<Permission>WRITE_ACP</Permission>' +
|
||||
'</PowerGrant>' +
|
||||
'</AccessControlList>' +
|
||||
'</AccessControlPolicy>',
|
||||
post: '<AccessControlPolicy xmlns='
|
||||
+ '"http://s3.amazonaws.com/doc/2006-03-01/">'
|
||||
+ '<Owner>'
|
||||
+ '<ID>79a59df900b949e55d96a1e698fbaced'
|
||||
+ 'fd6e09d98eacf8f8d5218e7cd47ef2be</ID>'
|
||||
+ '<DisplayName>OwnerDisplayName</DisplayName>'
|
||||
+ '</Owner>'
|
||||
+ '<AccessControlList>'
|
||||
+ '<Grant>'
|
||||
+ '<Grantee xsi:type="CanonicalUser">'
|
||||
+ '<ID>79a59df900b949e55d96a1e698fbaced'
|
||||
+ 'fd6e09d98eacf8f8d5218e7cd47ef2be</ID>'
|
||||
+ '<DisplayName>OwnerDisplayName</DisplayName>'
|
||||
+ '</Grantee>'
|
||||
+ '<Permission>FULL_CONTROL</Permission>'
|
||||
+ '</Grant>'
|
||||
+ '<PowerGrant>'
|
||||
+ '<Grantee xsi:type="AmazonCustomerByEmail">'
|
||||
+ '<EmailAddress>xyz@amazon.com</EmailAddress>'
|
||||
+ '</Grantee>'
|
||||
+ '<Permission>WRITE_ACP</Permission>'
|
||||
+ '</PowerGrant>'
|
||||
+ '</AccessControlList>'
|
||||
+ '</AccessControlPolicy>',
|
||||
url: '/?acl',
|
||||
query: { acl: '' },
|
||||
actionImplicitDenies: false,
|
||||
};
|
||||
|
||||
bucketPutACL(authInfo, testACLRequest, log, err => {
|
||||
|
@ -622,24 +638,25 @@ describe('putBucketACL API', () => {
|
|||
// so an error should be returned
|
||||
post: {
|
||||
'<AccessControlPolicy xmlns':
|
||||
'"http://s3.amazonaws.com/doc/2006-03-01/">' +
|
||||
'<Owner>' +
|
||||
'<ID>79a59df900b949e55d96a1e698fbaced' +
|
||||
'fd6e09d98eacf8f8d5218e7cd47ef2be</ID>' +
|
||||
'<DisplayName>OwnerDisplayName</DisplayName>' +
|
||||
'<Owner>' +
|
||||
'<AccessControlList>' +
|
||||
'<Grant>' +
|
||||
'<Grantee xsi:type="AmazonCustomerByEmail">' +
|
||||
'<EmailAddress>xyz@amazon.com</EmailAddress>' +
|
||||
'<Grantee>' +
|
||||
'<Permission>WRITE_ACP</Permission>' +
|
||||
'<Grant>' +
|
||||
'<AccessControlList>' +
|
||||
'<AccessControlPolicy>',
|
||||
'"http://s3.amazonaws.com/doc/2006-03-01/">'
|
||||
+ '<Owner>'
|
||||
+ '<ID>79a59df900b949e55d96a1e698fbaced'
|
||||
+ 'fd6e09d98eacf8f8d5218e7cd47ef2be</ID>'
|
||||
+ '<DisplayName>OwnerDisplayName</DisplayName>'
|
||||
+ '<Owner>'
|
||||
+ '<AccessControlList>'
|
||||
+ '<Grant>'
|
||||
+ '<Grantee xsi:type="AmazonCustomerByEmail">'
|
||||
+ '<EmailAddress>xyz@amazon.com</EmailAddress>'
|
||||
+ '<Grantee>'
|
||||
+ '<Permission>WRITE_ACP</Permission>'
|
||||
+ '<Grant>'
|
||||
+ '<AccessControlList>'
|
||||
+ '<AccessControlPolicy>',
|
||||
},
|
||||
url: '/?acl',
|
||||
query: { acl: '' },
|
||||
actionImplicitDenies: false,
|
||||
};
|
||||
|
||||
bucketPutACL(authInfo, testACLRequest, log, err => {
|
||||
|
@ -648,32 +665,33 @@ describe('putBucketACL API', () => {
|
|||
});
|
||||
});
|
||||
|
||||
it('should return an error if invalid group ' +
|
||||
'uri provided in ACLs set out in request body', done => {
|
||||
it('should return an error if invalid group '
|
||||
+ 'uri provided in ACLs set out in request body', done => {
|
||||
const testACLRequest = {
|
||||
bucketName,
|
||||
namespace,
|
||||
headers: { host: `${bucketName}.s3.amazonaws.com` },
|
||||
// URI in grant below is not valid group URI for s3
|
||||
post: '<AccessControlPolicy xmlns=' +
|
||||
'"http://s3.amazonaws.com/doc/2006-03-01/">' +
|
||||
'<Owner>' +
|
||||
'<ID>79a59df900b949e55d96a1e698fbaced' +
|
||||
'fd6e09d98eacf8f8d5218e7cd47ef2be</ID>' +
|
||||
'<DisplayName>OwnerDisplayName</DisplayName>' +
|
||||
'</Owner>' +
|
||||
'<AccessControlList>' +
|
||||
'<Grant>' +
|
||||
'<Grantee xsi:type="Group">' +
|
||||
'<URI>http://acs.amazonaws.com/groups/' +
|
||||
'global/NOTAVALIDGROUP</URI>' +
|
||||
'</Grantee>' +
|
||||
'<Permission>READ</Permission>' +
|
||||
'</Grant>' +
|
||||
'</AccessControlList>' +
|
||||
'</AccessControlPolicy>',
|
||||
post: '<AccessControlPolicy xmlns='
|
||||
+ '"http://s3.amazonaws.com/doc/2006-03-01/">'
|
||||
+ '<Owner>'
|
||||
+ '<ID>79a59df900b949e55d96a1e698fbaced'
|
||||
+ 'fd6e09d98eacf8f8d5218e7cd47ef2be</ID>'
|
||||
+ '<DisplayName>OwnerDisplayName</DisplayName>'
|
||||
+ '</Owner>'
|
||||
+ '<AccessControlList>'
|
||||
+ '<Grant>'
|
||||
+ '<Grantee xsi:type="Group">'
|
||||
+ '<URI>http://acs.amazonaws.com/groups/'
|
||||
+ 'global/NOTAVALIDGROUP</URI>'
|
||||
+ '</Grantee>'
|
||||
+ '<Permission>READ</Permission>'
|
||||
+ '</Grant>'
|
||||
+ '</AccessControlList>'
|
||||
+ '</AccessControlPolicy>',
|
||||
url: '/?acl',
|
||||
query: { acl: '' },
|
||||
actionImplicitDenies: false,
|
||||
};
|
||||
|
||||
bucketPutACL(authInfo, testACLRequest, log, err => {
|
||||
|
@ -682,19 +700,20 @@ describe('putBucketACL API', () => {
|
|||
});
|
||||
});
|
||||
|
||||
it('should return an error if invalid group uri' +
|
||||
'provided in ACL header request', done => {
|
||||
it('should return an error if invalid group uri'
|
||||
+ 'provided in ACL header request', done => {
|
||||
const testACLRequest = {
|
||||
bucketName,
|
||||
namespace,
|
||||
headers: {
|
||||
'host': `${bucketName}.s3.amazonaws.com`,
|
||||
'x-amz-grant-full-control':
|
||||
'uri="http://acs.amazonaws.com/groups/' +
|
||||
'global/NOTAVALIDGROUP"',
|
||||
'uri="http://acs.amazonaws.com/groups/'
|
||||
+ 'global/NOTAVALIDGROUP"',
|
||||
},
|
||||
url: '/?acl',
|
||||
query: { acl: '' },
|
||||
actionImplicitDenies: false,
|
||||
};
|
||||
|
||||
bucketPutACL(authInfo, testACLRequest, log, err => {
|
||||
|
|
|
@ -3,13 +3,13 @@ const { errors } = require('arsenal');
|
|||
|
||||
const { bucketPut } = require('../../../lib/api/bucketPut');
|
||||
const bucketPutCors = require('../../../lib/api/bucketPutCors');
|
||||
const { _validator, parseCorsXml }
|
||||
= require('../../../lib/api/apiUtils/bucket/bucketCors');
|
||||
const { cleanup,
|
||||
const { _validator, parseCorsXml } = require('../../../lib/api/apiUtils/bucket/bucketCors');
|
||||
const {
|
||||
cleanup,
|
||||
DummyRequestLogger,
|
||||
makeAuthInfo,
|
||||
CorsConfigTester }
|
||||
= require('../helpers');
|
||||
CorsConfigTester,
|
||||
} = require('../helpers');
|
||||
const metadata = require('../../../lib/metadata/wrapper');
|
||||
|
||||
const log = new DummyRequestLogger();
|
||||
|
@ -19,6 +19,7 @@ const testBucketPutRequest = {
|
|||
bucketName,
|
||||
headers: { host: `${bucketName}.s3.amazonaws.com` },
|
||||
url: '/',
|
||||
actionImplicitDenies: false,
|
||||
};
|
||||
|
||||
function _testPutBucketCors(authInfo, request, log, errCode, cb) {
|
||||
|
@ -30,13 +31,13 @@ function _testPutBucketCors(authInfo, request, log, errCode, cb) {
|
|||
}
|
||||
|
||||
function _generateSampleXml(value) {
|
||||
const xml = '<CORSConfiguration>' +
|
||||
'<CORSRule>' +
|
||||
'<AllowedMethod>PUT</AllowedMethod>' +
|
||||
'<AllowedOrigin>www.example.com</AllowedOrigin>' +
|
||||
`${value}` +
|
||||
'</CORSRule>' +
|
||||
'</CORSConfiguration>';
|
||||
const xml = '<CORSConfiguration>'
|
||||
+ '<CORSRule>'
|
||||
+ '<AllowedMethod>PUT</AllowedMethod>'
|
||||
+ '<AllowedOrigin>www.example.com</AllowedOrigin>'
|
||||
+ `${value}`
|
||||
+ '</CORSRule>'
|
||||
+ '</CORSConfiguration>';
|
||||
|
||||
return xml;
|
||||
}
|
||||
|
@ -125,8 +126,8 @@ describe('PUT bucket cors :: helper validation functions ', () => {
|
|||
|
||||
it('should return MalformedXML if more than one ID per rule', done => {
|
||||
const testValue = 'testid';
|
||||
const xml = _generateSampleXml(`<ID>${testValue}</ID>` +
|
||||
`<ID>${testValue}</ID>`);
|
||||
const xml = _generateSampleXml(`<ID>${testValue}</ID>`
|
||||
+ `<ID>${testValue}</ID>`);
|
||||
parseCorsXml(xml, log, err => {
|
||||
assert(err, 'Expected error but found none');
|
||||
assert.deepStrictEqual(err, errors.MalformedXML);
|
||||
|
@ -157,8 +158,8 @@ describe('PUT bucket cors :: helper validation functions ', () => {
|
|||
describe('validateMaxAgeSeconds ', () => {
|
||||
it('should validate successfully for valid value', done => {
|
||||
const testValue = 60;
|
||||
const xml = _generateSampleXml(`<MaxAgeSeconds>${testValue}` +
|
||||
'</MaxAgeSeconds>');
|
||||
const xml = _generateSampleXml(`<MaxAgeSeconds>${testValue}`
|
||||
+ '</MaxAgeSeconds>');
|
||||
parseCorsXml(xml, log, (err, result) => {
|
||||
assert.strictEqual(err, null, `Found unexpected err ${err}`);
|
||||
assert.strictEqual(typeof result[0].maxAgeSeconds, 'number');
|
||||
|
@ -167,12 +168,13 @@ describe('PUT bucket cors :: helper validation functions ', () => {
|
|||
});
|
||||
});
|
||||
|
||||
it('should return MalformedXML if more than one MaxAgeSeconds ' +
|
||||
'per rule', done => {
|
||||
it('should return MalformedXML if more than one MaxAgeSeconds '
|
||||
+ 'per rule', done => {
|
||||
const testValue = '60';
|
||||
const xml = _generateSampleXml(
|
||||
`<MaxAgeSeconds>${testValue}</MaxAgeSeconds>` +
|
||||
`<MaxAgeSeconds>${testValue}</MaxAgeSeconds>`);
|
||||
`<MaxAgeSeconds>${testValue}</MaxAgeSeconds>`
|
||||
+ `<MaxAgeSeconds>${testValue}</MaxAgeSeconds>`,
|
||||
);
|
||||
parseCorsXml(xml, log, err => {
|
||||
assert(err, 'Expected error but found none');
|
||||
assert.deepStrictEqual(err, errors.MalformedXML);
|
||||
|
@ -182,8 +184,8 @@ describe('PUT bucket cors :: helper validation functions ', () => {
|
|||
|
||||
it('should validate & return undefined if empty value', done => {
|
||||
const testValue = '';
|
||||
const xml = _generateSampleXml(`<MaxAgeSeconds>${testValue}` +
|
||||
'</MaxAgeSeconds>');
|
||||
const xml = _generateSampleXml(`<MaxAgeSeconds>${testValue}`
|
||||
+ '</MaxAgeSeconds>');
|
||||
parseCorsXml(xml, log, (err, result) => {
|
||||
assert.strictEqual(err, null, `Found unexpected err ${err}`);
|
||||
assert.strictEqual(result[0].MaxAgeSeconds, undefined);
|
||||
|
|
|
@ -14,6 +14,7 @@ const bucketPutRequest = {
bucketName,
headers: { host: `${bucketName}.s3.amazonaws.com` },
url: '/',
actionImplicitDenies: false,
};

describe('bucketPutEncryption API', () => {
@ -32,25 +33,27 @@ describe('bucketPutEncryption API', () => {

it('should reject a config with no Rule', done => {
bucketPutEncryption(authInfo, templateRequest(bucketName,
{ post: `<?xml version="1.0" encoding="UTF-8"?>
{
post: `<?xml version="1.0" encoding="UTF-8"?>
<ServerSideEncryptionConfiguration xmlns="http://s3.amazonaws.com/doc/2006-03-01/">
</ServerSideEncryptionConfiguration>`,
}), log, err => {
assert.strictEqual(err.is.MalformedXML, true);
done();
});
}), log, err => {
assert.strictEqual(err.is.MalformedXML, true);
done();
});
});

it('should reject a config with no ApplyServerSideEncryptionByDefault section', done => {
bucketPutEncryption(authInfo, templateRequest(bucketName,
{ post: `<?xml version="1.0" encoding="UTF-8"?>
{
post: `<?xml version="1.0" encoding="UTF-8"?>
<ServerSideEncryptionConfiguration xmlns="http://s3.amazonaws.com/doc/2006-03-01/">
<Rule></Rule>
</ServerSideEncryptionConfiguration>`,
}), log, err => {
assert.strictEqual(err.is.MalformedXML, true);
done();
});
}), log, err => {
assert.strictEqual(err.is.MalformedXML, true);
done();
});
});

it('should reject a config with no SSEAlgorithm', done => {
@ -155,33 +158,32 @@ describe('bucketPutEncryption API', () => {
});
});

it('should update SSEAlgorithm if existing SSEAlgorithm is AES256, ' +
'new SSEAlgorithm is aws:kms and no KMSMasterKeyID is provided',
done => {
const post = templateSSEConfig({ algorithm: 'AES256' });
bucketPutEncryption(authInfo, templateRequest(bucketName, { post }), log, err => {
it('should update SSEAlgorithm if existing SSEAlgorithm is AES256, '
+ 'new SSEAlgorithm is aws:kms and no KMSMasterKeyID is provided',
done => {
const post = templateSSEConfig({ algorithm: 'AES256' });
bucketPutEncryption(authInfo, templateRequest(bucketName, { post }), log, err => {
assert.ifError(err);
return getSSEConfig(bucketName, log, (err, sseInfo) => {
assert.ifError(err);
return getSSEConfig(bucketName, log, (err, sseInfo) => {
assert.ifError(err);
const { masterKeyId } = sseInfo;
const newConf = templateSSEConfig({ algorithm: 'aws:kms' });
return bucketPutEncryption(authInfo, templateRequest(bucketName, { post: newConf }), log,
err => {
assert.ifError(err);
return getSSEConfig(bucketName, log, (err, updatedSSEInfo) => {
assert.deepStrictEqual(updatedSSEInfo, {
mandatory: true,
algorithm: 'aws:kms',
cryptoScheme: 1,
masterKeyId,
});
done();
const { masterKeyId } = sseInfo;
const newConf = templateSSEConfig({ algorithm: 'aws:kms' });
return bucketPutEncryption(authInfo, templateRequest(bucketName, { post: newConf }), log,
err => {
assert.ifError(err);
return getSSEConfig(bucketName, log, (err, updatedSSEInfo) => {
assert.deepStrictEqual(updatedSSEInfo, {
mandatory: true,
algorithm: 'aws:kms',
cryptoScheme: 1,
masterKeyId,
});
}
);
});
done();
});
});
});
});
});

it('should update SSEAlgorithm to aws:kms and set KMSMasterKeyID', done => {
const post = templateSSEConfig({ algorithm: 'AES256' });
@ -17,6 +17,7 @@ const testBucketPutRequest = {
bucketName,
headers: { host: `${bucketName}.s3.amazonaws.com` },
url: '/',
actionImplicitDenies: false,
};

const expectedLifecycleConfig = {

@ -15,6 +15,7 @@ const bucketPutRequest = {
bucketName,
headers: { host: `${bucketName}.s3.amazonaws.com` },
url: '/',
actionImplicitDenies: false,
};

const expectedNotifConfig = {
@ -52,6 +53,7 @@ function getNotifRequest(empty) {
host: `${bucketName}.s3.amazonaws.com`,
},
post: notifXml,
actionImplicitDenies: false,
};
return putNotifConfigRequest;
}

@ -15,6 +15,7 @@ const bucketPutRequest = {
bucketName,
headers: { host: `${bucketName}.s3.amazonaws.com` },
url: '/',
actionImplicitDenies: false,
};

const objectLockXml = '<ObjectLockConfiguration ' +
@ -30,6 +31,7 @@ const putObjLockRequest = {
bucketName,
headers: { host: `${bucketName}.s3.amazonaws.com` },
post: objectLockXml,
actionImplicitDenies: false,
};

const expectedObjectLockConfig = {

@ -15,6 +15,7 @@ const testBucketPutRequest = {
bucketName,
headers: { host: `${bucketName}.s3.amazonaws.com` },
url: '/',
actionImplicitDenies: false,
};

let expectedBucketPolicy = {};
@ -25,6 +26,7 @@ function getPolicyRequest(policy) {
host: `${bucketName}.s3.amazonaws.com`,
},
post: JSON.stringify(policy),
actionImplicitDenies: false,
};
}

@ -76,7 +78,7 @@ describe('putBucketPolicy API', () => {
});
});

it('should return error if policy contains conditions', done => {
it.skip('should return error if policy contains conditions', done => {
expectedBucketPolicy.Statement[0].Condition =
{ StringEquals: { 's3:x-amz-acl': ['public-read'] } };
bucketPutPolicy(authInfo, getPolicyRequest(expectedBucketPolicy), log,

@ -19,6 +19,7 @@ const testBucketPutRequest = {
bucketName,
headers: { host: `${bucketName}.s3.amazonaws.com` },
url: '/',
actionImplicitDenies: false,
};

function _getPutWebsiteRequest(xml) {
@ -29,6 +30,7 @@ function _getPutWebsiteRequest(xml) {
},
url: '/?website',
query: { website: '' },
actionImplicitDenies: false,
};
request.post = xml;
return request;

@ -24,6 +24,7 @@ const bucketPutRequest = {
namespace,
headers: { host: `${bucketName}.s3.amazonaws.com` },
url: '/',
actionImplicitDenies: false,
};
const objectKey = 'testObject';
const initiateRequest = {
@ -32,6 +33,7 @@ const initiateRequest = {
objectKey,
headers: { host: `${bucketName}.s3.amazonaws.com` },
url: `/${objectKey}?uploads`,
actionImplicitDenies: false,
};
const eastLocation = 'us-east-1';
const westLocation = 'scality-internal-file';
@ -68,6 +70,7 @@ function _createAndAbortMpu(usEastSetting, fakeUploadID, locationConstraint,
partNumber: '1',
uploadId,
},
actionImplicitDenies: false,
}, partBody);
const testUploadId = fakeUploadID ? 'nonexistinguploadid' :
uploadId;
@ -78,6 +81,7 @@ function _createAndAbortMpu(usEastSetting, fakeUploadID, locationConstraint,
headers: { host: `${bucketName}.s3.amazonaws.com` },
url: `/${objectKey}?uploadId=${testUploadId}`,
query: { uploadId: testUploadId },
actionImplicitDenies: false,
};
next(null, partRequest, deleteMpuRequest);
},

@ -22,6 +22,7 @@ const testBucketPutRequest = {
bucketName,
headers: { host: `${bucketName}.s3.amazonaws.com` },
url: '/',
actionImplicitDenies: false,
};

const testPutObjectRequest = new DummyRequest({

@ -47,6 +47,7 @@ describe('objectGet API', () => {
namespace,
headers: {},
url: `/${bucketName}`,
actionImplicitDenies: false,
};
const userMetadataKey = 'x-amz-meta-test';
const userMetadataValue = 'some metadata';
@ -56,6 +57,7 @@ describe('objectGet API', () => {
objectKey: objectName,
headers: {},
url: `/${bucketName}/${objectName}`,
actionImplicitDenies: false,
};

it('should get the object metadata', done => {

@ -84,6 +86,7 @@ describe('objectGet API', () => {
'x-amz-bucket-object-lock-enabled': 'true',
},
url: `/${bucketName}`,
actionImplicitDenies: false,
};

const createPutDummyRetention = (date, mode) => new DummyRequest({
@ -245,6 +248,7 @@ describe('objectGet API', () => {
objectKey: objectName,
headers: { host: `${bucketName}.s3.amazonaws.com` },
url: `/${objectName}?uploads`,
actionImplicitDenies: false,
};
async.waterfall([
next => bucketPut(authInfo, testPutBucketRequest, log, next),
@ -321,6 +325,7 @@ describe('objectGet API', () => {
headers: { host: `${bucketName}.s3.amazonaws.com` },
query: { uploadId: testUploadId },
post: completeBody,
actionImplicitDenies: false,
};
completeMultipartUpload(authInfo, completeRequest,
log, err => {

@ -36,6 +36,7 @@ describe('objectGetACL API', () => {
'x-amz-acl': 'public-read-write',
},
url: '/',
actionImplicitDenies: false,
};
const testGetACLRequest = {
bucketName,
@ -44,6 +45,7 @@ describe('objectGetACL API', () => {
objectKey: objectName,
url: `/${bucketName}/${objectName}?acl`,
query: { acl: '' },
actionImplicitDenies: false,
};

it('should get a canned private ACL', done => {

@ -18,6 +18,7 @@ const bucketPutRequest = {
bucketName,
headers: { host: `${bucketName}.s3.amazonaws.com` },
url: '/',
actionImplicitDenies: false,
};

const putObjectRequest = new DummyRequest({
@ -37,12 +38,14 @@ const putObjectLegalHoldRequest = status => ({
objectKey: objectName,
headers: { host: `${bucketName}.s3.amazonaws.com` },
post: objectLegalHoldXml(status),
actionImplicitDenies: false,
});

const getObjectLegalHoldRequest = {
bucketName,
objectKey: objectName,
headers: { host: `${bucketName}.s3.amazonaws.com` },
actionImplicitDenies: false,
};

describe('getObjectLegalHold API', () => {

@ -21,6 +21,7 @@ const bucketPutRequest = {
bucketName,
headers: { host: `${bucketName}.s3.amazonaws.com` },
url: '/',
actionImplicitDenies: false,
};

const putObjectRequest = new DummyRequest({
@ -42,12 +43,14 @@ const putObjRetRequest = {
objectKey: objectName,
headers: { host: `${bucketName}.s3.amazonaws.com` },
post: objectRetentionXml,
actionImplicitDenies: false,
};

const getObjRetRequest = {
bucketName,
objectKey: objectName,
headers: { host: `${bucketName}.s3.amazonaws.com` },
actionImplicitDenies: false,
};

describe('getObjectRetention API', () => {

@ -21,6 +21,7 @@ const testBucketPutRequest = {
bucketName,
headers: { host: `${bucketName}.s3.amazonaws.com` },
url: '/',
actionImplicitDenies: false,
};

const testPutObjectRequest = new DummyRequest({
@ -4,14 +4,16 @@ const moment = require('moment');
const { errors, s3middleware } = require('arsenal');
const sinon = require('sinon');

const { ds } = require('arsenal').storage.data.inMemory.datastore;
const { bucketPut } = require('../../../lib/api/bucketPut');
const bucketPutObjectLock = require('../../../lib/api/bucketPutObjectLock');
const bucketPutACL = require('../../../lib/api/bucketPutACL');
const bucketPutVersioning = require('../../../lib/api/bucketPutVersioning');

const { parseTagFromQuery } = s3middleware.tagging;
const { cleanup, DummyRequestLogger, makeAuthInfo, versioningTestUtils }
= require('../helpers');
const { ds } = require('arsenal').storage.data.inMemory.datastore;
const {
cleanup, DummyRequestLogger, makeAuthInfo, versioningTestUtils,
} = require('../helpers');
const metadata = require('../metadataswitch');
const objectPut = require('../../../lib/api/objectPut');
const { objectLockTestUtils } = require('../helpers');
@ -19,7 +21,7 @@ const DummyRequest = require('../DummyRequest');
const mpuUtils = require('../utils/mpuUtils');
const { lastModifiedHeader } = require('../../../constants');

const any = sinon.match.any;
const { any } = sinon.match;

const log = new DummyRequestLogger();
const canonicalID = 'accessKey1';
@ -49,10 +51,8 @@ const originalputObjectMD = metadata.putObjectMD;
const objectName = 'objectName';

let testPutObjectRequest;
const enableVersioningRequest =
versioningTestUtils.createBucketPutVersioningReq(bucketName, 'Enabled');
const suspendVersioningRequest =
versioningTestUtils.createBucketPutVersioningReq(bucketName, 'Suspended');
const enableVersioningRequest = versioningTestUtils.createBucketPutVersioningReq(bucketName, 'Enabled');
const suspendVersioningRequest = versioningTestUtils.createBucketPutVersioningReq(bucketName, 'Suspended');

function testAuth(bucketOwner, authUser, bucketPutReq, log, cb) {
bucketPut(bucketOwner, bucketPutReq, log, () => {
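The import hunk above switches from `const any = sinon.match.any;` to destructuring the matcher. For context, here is a small stand-alone sketch of how such a matcher is typically exercised; the spied-on collaborator below is a stand-in, not the suite's real metadata wrapper.

```js
// Stand-alone sketch of the destructured sinon matcher in use.
const sinon = require('sinon');
const { any } = sinon.match;

// Hypothetical collaborator standing in for the metadata wrapper.
const store = {
    putObjectMD: (bucket, key, value, params, log, cb) => cb(null),
};

const spy = sinon.spy(store, 'putObjectMD');
store.putObjectMD('bucket', 'key', { size: 1 }, {}, console, () => {});

// `any` matches whatever value was passed in that argument position,
// which keeps the assertion focused on the arguments that matter.
sinon.assert.calledWith(spy, 'bucket', 'key', any, any, any, any);
```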
@ -74,8 +74,10 @@ describe('parseTagFromQuery', () => {
|
|||
const allowedChar = '+- =._:/';
|
||||
const tests = [
|
||||
{ tagging: 'key1=value1', result: { key1: 'value1' } },
|
||||
{ tagging: `key1=${encodeURIComponent(allowedChar)}`,
|
||||
result: { key1: allowedChar } },
|
||||
{
|
||||
tagging: `key1=${encodeURIComponent(allowedChar)}`,
|
||||
result: { key1: allowedChar },
|
||||
},
|
||||
{ tagging: 'key1=value1=value2', error: invalidArgument },
|
||||
{ tagging: '=value1', error: invalidArgument },
|
||||
{ tagging: 'key1%=value1', error: invalidArgument },
|
||||
|
@ -143,16 +145,14 @@ describe('objectPut API', () => {
|
|||
it('should put object if user has FULL_CONTROL grant on bucket', done => {
|
||||
const bucketOwner = makeAuthInfo('accessKey2');
|
||||
const authUser = makeAuthInfo('accessKey3');
|
||||
testPutBucketRequest.headers['x-amz-grant-full-control'] =
|
||||
`id=${authUser.getCanonicalID()}`;
|
||||
testPutBucketRequest.headers['x-amz-grant-full-control'] = `id=${authUser.getCanonicalID()}`;
|
||||
testAuth(bucketOwner, authUser, testPutBucketRequest, log, done);
|
||||
});
|
||||
|
||||
it('should put object if user has WRITE grant on bucket', done => {
|
||||
const bucketOwner = makeAuthInfo('accessKey2');
|
||||
const authUser = makeAuthInfo('accessKey3');
|
||||
testPutBucketRequest.headers['x-amz-grant-write'] =
|
||||
`id=${authUser.getCanonicalID()}`;
|
||||
testPutBucketRequest.headers['x-amz-grant-write'] = `id=${authUser.getCanonicalID()}`;
|
||||
|
||||
testAuth(bucketOwner, authUser, testPutBucketRequest, log, done);
|
||||
});
|
||||
|
@ -183,7 +183,7 @@ describe('objectPut API', () => {
|
|||
{}, log, (err, md) => {
|
||||
assert(md);
|
||||
assert
|
||||
.strictEqual(md['content-md5'], correctMD5);
|
||||
.strictEqual(md['content-md5'], correctMD5);
|
||||
done();
|
||||
});
|
||||
});
|
||||
|
@ -240,8 +240,8 @@ describe('objectPut API', () => {
|
|||
];
|
||||
testObjectLockConfigs.forEach(config => {
|
||||
const { testMode, type, val } = config;
|
||||
it('should put an object with default retention if object does not ' +
|
||||
'have retention configuration but bucket has', done => {
|
||||
it('should put an object with default retention if object does not '
|
||||
+ 'have retention configuration but bucket has', done => {
|
||||
const testPutObjectRequest = new DummyRequest({
|
||||
bucketName,
|
||||
namespace,
|
||||
|
@ -255,6 +255,7 @@ describe('objectPut API', () => {
|
|||
bucketName,
|
||||
headers: { host: `${bucketName}.s3.amazonaws.com` },
|
||||
post: objectLockTestUtils.generateXml(testMode, val, type),
|
||||
actionImplicitDenies: false,
|
||||
};
|
||||
|
||||
bucketPut(authInfo, testPutBucketRequestLock, log, () => {
|
||||
|
@ -268,10 +269,8 @@ describe('objectPut API', () => {
|
|||
const mode = md.retentionMode;
|
||||
const retainDate = md.retentionDate;
|
||||
const date = moment();
|
||||
const days
|
||||
= type === 'Days' ? val : val * 365;
|
||||
const expectedDate
|
||||
= date.add(days, 'days');
|
||||
const days = type === 'Days' ? val : val * 365;
|
||||
const expectedDate = date.add(days, 'days');
|
||||
assert.ifError(err);
|
||||
assert.strictEqual(mode, testMode);
|
||||
assert.strictEqual(formatTime(retainDate),
|
||||
|
@ -365,11 +364,11 @@ describe('objectPut API', () => {
|
|||
(err, md) => {
|
||||
assert(md);
|
||||
assert.strictEqual(md['x-amz-meta-test'],
|
||||
'some metadata');
|
||||
'some metadata');
|
||||
assert.strictEqual(md['x-amz-meta-test2'],
|
||||
'some more metadata');
|
||||
'some more metadata');
|
||||
assert.strictEqual(md['x-amz-meta-test3'],
|
||||
'even more metadata');
|
||||
'even more metadata');
|
||||
done();
|
||||
});
|
||||
});
|
||||
|
@ -438,7 +437,7 @@ describe('objectPut API', () => {
|
|||
(err, md) => {
|
||||
assert(md);
|
||||
assert.strictEqual(md['x-amz-meta-x-scal-last-modified'],
|
||||
imposedLastModified);
|
||||
imposedLastModified);
|
||||
const lastModified = md['last-modified'];
|
||||
const lastModifiedDate = lastModified.split('T')[0];
|
||||
const currentTs = new Date().toJSON();
|
||||
|
@ -478,11 +477,11 @@ describe('objectPut API', () => {
|
|||
assert(md);
|
||||
assert.strictEqual(md.location, null);
|
||||
assert.strictEqual(md['x-amz-meta-test'],
|
||||
'some metadata');
|
||||
'some metadata');
|
||||
assert.strictEqual(md['x-amz-meta-test2'],
|
||||
'some more metadata');
|
||||
'some more metadata');
|
||||
assert.strictEqual(md['x-amz-meta-test3'],
|
||||
'even more metadata');
|
||||
'even more metadata');
|
||||
done();
|
||||
});
|
||||
});
|
||||
|
@ -503,19 +502,19 @@ describe('objectPut API', () => {
|
|||
undefined, log, () => {
|
||||
objectPut(authInfo, testPutObjectRequest2, undefined,
|
||||
log,
|
||||
() => {
|
||||
() => {
|
||||
// orphan objects don't get deleted
|
||||
// until the next tick
|
||||
// in memory
|
||||
setImmediate(() => {
|
||||
setImmediate(() => {
|
||||
// Data store starts at index 1
|
||||
assert.strictEqual(ds[0], undefined);
|
||||
assert.strictEqual(ds[1], undefined);
|
||||
assert.deepStrictEqual(ds[2].value,
|
||||
Buffer.from('I am another body', 'utf8'));
|
||||
done();
|
||||
assert.strictEqual(ds[0], undefined);
|
||||
assert.strictEqual(ds[1], undefined);
|
||||
assert.deepStrictEqual(ds[2].value,
|
||||
Buffer.from('I am another body', 'utf8'));
|
||||
done();
|
||||
});
|
||||
});
|
||||
});
|
||||
});
|
||||
});
|
||||
});
|
||||
|
@ -534,8 +533,8 @@ describe('objectPut API', () => {
|
|||
});
|
||||
});
|
||||
|
||||
it('should not put object with retention configuration if object lock ' +
|
||||
'is not enabled on the bucket', done => {
|
||||
it('should not put object with retention configuration if object lock '
|
||||
+ 'is not enabled on the bucket', done => {
|
||||
const testPutObjectRequest = new DummyRequest({
|
||||
bucketName,
|
||||
namespace,
|
||||
|
@ -552,15 +551,14 @@ describe('objectPut API', () => {
objectPut(authInfo, testPutObjectRequest, undefined, log, err => {
assert.deepStrictEqual(err, errors.InvalidRequest
.customizeDescription(
'Bucket is missing ObjectLockConfiguration'));
'Bucket is missing ObjectLockConfiguration',
));
done();
});
});
});
it('should forward a 400 back to client on metadata 408 response', () => {
metadata.putObjectMD =
(bucketName, objName, objVal, params, log, cb) =>
cb({ httpCode: 408 });
metadata.putObjectMD = (bucketName, objName, objVal, params, log, cb) => cb({ httpCode: 408 });

bucketPut(authInfo, testPutBucketRequest, log, () => {
objectPut(authInfo, testPutObjectRequest, undefined, log,
@ -571,9 +569,7 @@ describe('objectPut API', () => {
});

it('should forward a 502 to the client for 4xx != 408', () => {
metadata.putObjectMD =
(bucketName, objName, objVal, params, log, cb) =>
cb({ httpCode: 412 });
metadata.putObjectMD = (bucketName, objName, objVal, params, log, cb) => cb({ httpCode: 412 });

bucketPut(authInfo, testPutBucketRequest, log, () => {
objectPut(authInfo, testPutObjectRequest, undefined, log,
@ -589,13 +585,12 @@ describe('objectPut API with versioning', () => {
cleanup();
});

const objData = ['foo0', 'foo1', 'foo2'].map(str =>
Buffer.from(str, 'utf8'));
const objData = ['foo0', 'foo1', 'foo2'].map(str => Buffer.from(str, 'utf8'));
const testPutObjectRequests = objData.map(data => versioningTestUtils
.createPutObjectRequest(bucketName, objectName, data));

it('should delete latest version when creating new null version ' +
'if latest version is null version', done => {
it('should delete latest version when creating new null version '
+ 'if latest version is null version', done => {
async.series([
callback => bucketPut(authInfo, testPutBucketRequest, log,
callback),
@ -633,8 +628,7 @@ describe('objectPut API with versioning', () => {
|
|||
});
|
||||
|
||||
describe('when null version is not the latest version', () => {
|
||||
const objData = ['foo0', 'foo1', 'foo2'].map(str =>
|
||||
Buffer.from(str, 'utf8'));
|
||||
const objData = ['foo0', 'foo1', 'foo2'].map(str => Buffer.from(str, 'utf8'));
|
||||
const testPutObjectRequests = objData.map(data => versioningTestUtils
|
||||
.createPutObjectRequest(bucketName, objectName, data));
|
||||
beforeEach(done => {
|
||||
|
@ -662,23 +656,23 @@ describe('objectPut API with versioning', () => {
|
|||
});
|
||||
|
||||
it('should still delete null version when creating new null version',
|
||||
done => {
|
||||
objectPut(authInfo, testPutObjectRequests[2], undefined,
|
||||
log, err => {
|
||||
assert.ifError(err, `Unexpected err: ${err}`);
|
||||
setImmediate(() => {
|
||||
done => {
|
||||
objectPut(authInfo, testPutObjectRequests[2], undefined,
|
||||
log, err => {
|
||||
assert.ifError(err, `Unexpected err: ${err}`);
|
||||
setImmediate(() => {
|
||||
// old null version should be deleted after putting
|
||||
// new null version
|
||||
versioningTestUtils.assertDataStoreValues(ds,
|
||||
[undefined, objData[1], objData[2]]);
|
||||
done(err);
|
||||
versioningTestUtils.assertDataStoreValues(ds,
|
||||
[undefined, objData[1], objData[2]]);
|
||||
done(err);
|
||||
});
|
||||
});
|
||||
});
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
it('should return BadDigest error and not leave orphans in data when ' +
|
||||
'contentMD5 and completedHash do not match', done => {
|
||||
it('should return BadDigest error and not leave orphans in data when '
|
||||
+ 'contentMD5 and completedHash do not match', done => {
|
||||
const testPutObjectRequest = new DummyRequest({
|
||||
bucketName,
|
||||
namespace,
|
||||
|
@ -690,18 +684,18 @@ describe('objectPut API with versioning', () => {
|
|||
|
||||
bucketPut(authInfo, testPutBucketRequest, log, () => {
|
||||
objectPut(authInfo, testPutObjectRequest, undefined, log,
|
||||
err => {
|
||||
assert.deepStrictEqual(err, errors.BadDigest);
|
||||
// orphan objects don't get deleted
|
||||
// until the next tick
|
||||
// in memory
|
||||
setImmediate(() => {
|
||||
err => {
|
||||
assert.deepStrictEqual(err, errors.BadDigest);
|
||||
// orphan objects don't get deleted
|
||||
// until the next tick
|
||||
// in memory
|
||||
setImmediate(() => {
|
||||
// Data store starts at index 1
|
||||
assert.strictEqual(ds[0], undefined);
|
||||
assert.strictEqual(ds[1], undefined);
|
||||
done();
|
||||
assert.strictEqual(ds[0], undefined);
|
||||
assert.strictEqual(ds[1], undefined);
|
||||
done();
|
||||
});
|
||||
});
|
||||
});
|
||||
});
|
||||
});
|
||||
});
|
||||
|
|
|
@ -3,11 +3,12 @@ const { errors } = require('arsenal');

const { bucketPut } = require('../../../lib/api/bucketPut');
const constants = require('../../../constants');
const { cleanup,
const {
cleanup,
DummyRequestLogger,
makeAuthInfo,
AccessControlPolicy }
= require('../helpers');
AccessControlPolicy,
} = require('../helpers');
const metadata = require('../metadataswitch');
const objectPut = require('../../../lib/api/objectPut');
const objectPutACL = require('../../../lib/api/objectPutACL');
@ -17,8 +18,8 @@ const log = new DummyRequestLogger();
const canonicalID = 'accessKey1';
const authInfo = makeAuthInfo(canonicalID);
const ownerID = authInfo.getCanonicalID();
const anotherID = '79a59df900b949e55d96a1e698fba' +
'cedfd6e09d98eacf8f8d5218e7cd47ef2bf';
const anotherID = '79a59df900b949e55d96a1e698fba'
+ 'cedfd6e09d98eacf8f8d5218e7cd47ef2bf';
const defaultAcpParams = {
ownerID,
ownerDisplayName: 'OwnerDisplayName',
@ -56,6 +57,7 @@ describe('putObjectACL API', () => {
|
|||
headers: { 'x-amz-acl': 'invalid-option' },
|
||||
url: `/${bucketName}/${objectName}?acl`,
|
||||
query: { acl: '' },
|
||||
actionImplicitDenies: false,
|
||||
};
|
||||
|
||||
bucketPut(authInfo, testPutBucketRequest, log, () => {
|
||||
|
@ -79,6 +81,7 @@ describe('putObjectACL API', () => {
|
|||
headers: { 'x-amz-acl': 'public-read-write' },
|
||||
url: `/${bucketName}/${objectName}?acl`,
|
||||
query: { acl: '' },
|
||||
actionImplicitDenies: false,
|
||||
};
|
||||
|
||||
bucketPut(authInfo, testPutBucketRequest, log, () => {
|
||||
|
@ -88,12 +91,12 @@ describe('putObjectACL API', () => {
|
|||
objectPutACL(authInfo, testObjACLRequest, log, err => {
|
||||
assert.strictEqual(err, null);
|
||||
metadata.getObjectMD(bucketName, objectName, {},
|
||||
log, (err, md) => {
|
||||
assert.strictEqual(md.acl.Canned,
|
||||
'public-read-write');
|
||||
assert.strictEqual(md.originOp, 's3:ObjectAcl:Put');
|
||||
done();
|
||||
});
|
||||
log, (err, md) => {
|
||||
assert.strictEqual(md.acl.Canned,
|
||||
'public-read-write');
|
||||
assert.strictEqual(md.originOp, 's3:ObjectAcl:Put');
|
||||
done();
|
||||
});
|
||||
});
|
||||
});
|
||||
});
|
||||
|
@ -108,6 +111,7 @@ describe('putObjectACL API', () => {
|
|||
headers: { 'x-amz-acl': 'public-read' },
|
||||
url: `/${bucketName}/${objectName}?acl`,
|
||||
query: { acl: '' },
|
||||
actionImplicitDenies: false,
|
||||
};
|
||||
|
||||
const testObjACLRequest2 = {
|
||||
|
@ -117,6 +121,7 @@ describe('putObjectACL API', () => {
|
|||
headers: { 'x-amz-acl': 'authenticated-read' },
|
||||
url: `/${bucketName}/${objectName}?acl`,
|
||||
query: { acl: '' },
|
||||
actionImplicitDenies: false,
|
||||
};
|
||||
|
||||
bucketPut(authInfo, testPutBucketRequest, log, () => {
|
||||
|
@ -126,22 +131,22 @@ describe('putObjectACL API', () => {
|
|||
objectPutACL(authInfo, testObjACLRequest1, log, err => {
|
||||
assert.strictEqual(err, null);
|
||||
metadata.getObjectMD(bucketName, objectName, {},
|
||||
log, (err, md) => {
|
||||
assert.strictEqual(md.acl.Canned,
|
||||
'public-read');
|
||||
objectPutACL(authInfo, testObjACLRequest2, log,
|
||||
err => {
|
||||
assert.strictEqual(err, null);
|
||||
metadata.getObjectMD(bucketName,
|
||||
objectName, {}, log, (err, md) => {
|
||||
assert.strictEqual(md
|
||||
.acl.Canned,
|
||||
'authenticated-read');
|
||||
assert.strictEqual(md.originOp, 's3:ObjectAcl:Put');
|
||||
done();
|
||||
});
|
||||
});
|
||||
});
|
||||
log, (err, md) => {
|
||||
assert.strictEqual(md.acl.Canned,
|
||||
'public-read');
|
||||
objectPutACL(authInfo, testObjACLRequest2, log,
|
||||
err => {
|
||||
assert.strictEqual(err, null);
|
||||
metadata.getObjectMD(bucketName,
|
||||
objectName, {}, log, (err, md) => {
|
||||
assert.strictEqual(md
|
||||
.acl.Canned,
|
||||
'authenticated-read');
|
||||
assert.strictEqual(md.originOp, 's3:ObjectAcl:Put');
|
||||
done();
|
||||
});
|
||||
});
|
||||
});
|
||||
});
|
||||
});
|
||||
});
|
||||
|
@ -154,14 +159,15 @@ describe('putObjectACL API', () => {
|
|||
objectKey: objectName,
|
||||
headers: {
|
||||
'x-amz-grant-full-control':
|
||||
'emailaddress="sampleaccount1@sampling.com"' +
|
||||
',emailaddress="sampleaccount2@sampling.com"',
|
||||
'emailaddress="sampleaccount1@sampling.com"'
|
||||
+ ',emailaddress="sampleaccount2@sampling.com"',
|
||||
'x-amz-grant-read': `uri=${constants.logId}`,
|
||||
'x-amz-grant-read-acp': `id=${ownerID}`,
|
||||
'x-amz-grant-write-acp': `id=${anotherID}`,
|
||||
},
|
||||
url: `/${bucketName}/${objectName}?acl`,
|
||||
query: { acl: '' },
|
||||
actionImplicitDenies: false,
|
||||
};
|
||||
bucketPut(authInfo, testPutBucketRequest, log, () => {
|
||||
objectPut(authInfo, testPutObjectRequest, undefined, log,
|
||||
|
@ -191,19 +197,20 @@ describe('putObjectACL API', () => {
|
|||
});
|
||||
});
|
||||
|
||||
it('should return an error if invalid email ' +
|
||||
'provided in ACL header request', done => {
|
||||
it('should return an error if invalid email '
|
||||
+ 'provided in ACL header request', done => {
|
||||
const testObjACLRequest = {
|
||||
bucketName,
|
||||
namespace,
|
||||
objectKey: objectName,
|
||||
headers: {
|
||||
'x-amz-grant-full-control':
|
||||
'emailaddress="sampleaccount1@sampling.com"' +
|
||||
',emailaddress="nonexistentemail@sampling.com"',
|
||||
'emailaddress="sampleaccount1@sampling.com"'
|
||||
+ ',emailaddress="nonexistentemail@sampling.com"',
|
||||
},
|
||||
url: `/${bucketName}/${objectName}?acl`,
|
||||
query: { acl: '' },
|
||||
actionImplicitDenies: false,
|
||||
};
|
||||
|
||||
bucketPut(authInfo, testPutBucketRequest, log, () => {
|
||||
|
@ -234,6 +241,7 @@ describe('putObjectACL API', () => {
|
|||
url: `/${bucketName}/${objectName}?acl`,
|
||||
post: [Buffer.from(acp.getXml(), 'utf8')],
|
||||
query: { acl: '' },
|
||||
actionImplicitDenies: false,
|
||||
};
|
||||
|
||||
bucketPut(authInfo, testPutBucketRequest, log, () => {
|
||||
|
@ -243,25 +251,25 @@ describe('putObjectACL API', () => {
|
|||
objectPutACL(authInfo, testObjACLRequest, log, err => {
|
||||
assert.strictEqual(err, null);
|
||||
metadata.getObjectMD(bucketName, objectName, {},
|
||||
log, (err, md) => {
|
||||
assert.strictEqual(md
|
||||
.acl.FULL_CONTROL[0], ownerID);
|
||||
assert.strictEqual(md
|
||||
.acl.READ[0], constants.publicId);
|
||||
assert.strictEqual(md
|
||||
.acl.WRITE_ACP[0], ownerID);
|
||||
assert.strictEqual(md
|
||||
.acl.READ_ACP[0], anotherID);
|
||||
assert.strictEqual(md.originOp, 's3:ObjectAcl:Put');
|
||||
done();
|
||||
});
|
||||
log, (err, md) => {
|
||||
assert.strictEqual(md
|
||||
.acl.FULL_CONTROL[0], ownerID);
|
||||
assert.strictEqual(md
|
||||
.acl.READ[0], constants.publicId);
|
||||
assert.strictEqual(md
|
||||
.acl.WRITE_ACP[0], ownerID);
|
||||
assert.strictEqual(md
|
||||
.acl.READ_ACP[0], anotherID);
|
||||
assert.strictEqual(md.originOp, 's3:ObjectAcl:Put');
|
||||
done();
|
||||
});
|
||||
});
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
it('should return an error if wrong owner ID ' +
|
||||
'provided in ACLs set out in request body', done => {
|
||||
it('should return an error if wrong owner ID '
|
||||
+ 'provided in ACLs set out in request body', done => {
|
||||
const acp = new AccessControlPolicy({ ownerID: anotherID });
|
||||
const testObjACLRequest = {
|
||||
bucketName,
|
||||
|
@ -271,6 +279,7 @@ describe('putObjectACL API', () => {
|
|||
url: `/${bucketName}/${objectName}?acl`,
|
||||
post: [Buffer.from(acp.getXml(), 'utf8')],
|
||||
query: { acl: '' },
|
||||
actionImplicitDenies: false,
|
||||
};
|
||||
|
||||
bucketPut(authInfo, testPutBucketRequest, log, () => {
|
||||
|
@ -285,8 +294,8 @@ describe('putObjectACL API', () => {
|
|||
});
|
||||
});
|
||||
|
||||
it('should ignore if WRITE ACL permission is ' +
|
||||
'provided in request body', done => {
|
||||
it('should ignore if WRITE ACL permission is '
|
||||
+ 'provided in request body', done => {
|
||||
const acp = new AccessControlPolicy(defaultAcpParams);
|
||||
acp.addGrantee('CanonicalUser', ownerID, 'FULL_CONTROL',
|
||||
'OwnerDisplayName');
|
||||
|
@ -299,6 +308,7 @@ describe('putObjectACL API', () => {
|
|||
url: `/${bucketName}/${objectName}?acl`,
|
||||
post: [Buffer.from(acp.getXml(), 'utf8')],
|
||||
query: { acl: '' },
|
||||
actionImplicitDenies: false,
|
||||
};
|
||||
|
||||
bucketPut(authInfo, testPutBucketRequest, log, () => {
|
||||
|
@ -308,25 +318,25 @@ describe('putObjectACL API', () => {
|
|||
objectPutACL(authInfo, testObjACLRequest, log, err => {
|
||||
assert.strictEqual(err, null);
|
||||
metadata.getObjectMD(bucketName, objectName, {},
|
||||
log, (err, md) => {
|
||||
assert.strictEqual(md.acl.Canned, '');
|
||||
assert.strictEqual(md.acl.FULL_CONTROL[0],
|
||||
ownerID);
|
||||
assert.strictEqual(md.acl.WRITE, undefined);
|
||||
assert.strictEqual(md.acl.READ[0], undefined);
|
||||
assert.strictEqual(md.acl.WRITE_ACP[0],
|
||||
undefined);
|
||||
assert.strictEqual(md.acl.READ_ACP[0],
|
||||
undefined);
|
||||
done();
|
||||
});
|
||||
log, (err, md) => {
|
||||
assert.strictEqual(md.acl.Canned, '');
|
||||
assert.strictEqual(md.acl.FULL_CONTROL[0],
|
||||
ownerID);
|
||||
assert.strictEqual(md.acl.WRITE, undefined);
|
||||
assert.strictEqual(md.acl.READ[0], undefined);
|
||||
assert.strictEqual(md.acl.WRITE_ACP[0],
|
||||
undefined);
|
||||
assert.strictEqual(md.acl.READ_ACP[0],
|
||||
undefined);
|
||||
done();
|
||||
});
|
||||
});
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
it('should return an error if invalid email ' +
|
||||
'address provided in ACLs set out in request body', done => {
|
||||
it('should return an error if invalid email '
|
||||
+ 'address provided in ACLs set out in request body', done => {
|
||||
const acp = new AccessControlPolicy(defaultAcpParams);
|
||||
acp.addGrantee('AmazonCustomerByEmail', 'xyz@amazon.com', 'WRITE_ACP');
|
||||
const testObjACLRequest = {
|
||||
|
@ -337,6 +347,7 @@ describe('putObjectACL API', () => {
|
|||
url: `/${bucketName}/${objectName}?acl`,
|
||||
post: [Buffer.from(acp.getXml(), 'utf8')],
|
||||
query: { acl: '' },
|
||||
actionImplicitDenies: false,
|
||||
};
|
||||
|
||||
|
||||
|
@ -352,8 +363,8 @@ describe('putObjectACL API', () => {
|
|||
});
|
||||
});
|
||||
|
||||
it('should return an error if xml provided does not match s3 ' +
|
||||
'scheme for setting ACLs', done => {
|
||||
it('should return an error if xml provided does not match s3 '
|
||||
+ 'scheme for setting ACLs', done => {
|
||||
const acp = new AccessControlPolicy(defaultAcpParams);
|
||||
acp.addGrantee('AmazonCustomerByEmail', 'xyz@amazon.com', 'WRITE_ACP');
|
||||
const originalXml = acp.getXml();
|
||||
|
@ -366,6 +377,7 @@ describe('putObjectACL API', () => {
|
|||
url: `/${bucketName}/${objectName}?acl`,
|
||||
post: [Buffer.from(modifiedXml, 'utf8')],
|
||||
query: { acl: '' },
|
||||
actionImplicitDenies: false,
|
||||
};
|
||||
|
||||
bucketPut(authInfo, testPutBucketRequest, log, () => {
|
||||
|
@ -394,6 +406,7 @@ describe('putObjectACL API', () => {
|
|||
url: `/${bucketName}/${objectName}?acl`,
|
||||
post: [Buffer.from(modifiedXml, 'utf8')],
|
||||
query: { acl: '' },
|
||||
actionImplicitDenies: false,
|
||||
};
|
||||
|
||||
|
||||
|
@ -409,11 +422,11 @@ describe('putObjectACL API', () => {
|
|||
});
|
||||
});
|
||||
|
||||
it('should return an error if invalid group ' +
|
||||
'uri provided in ACLs set out in request body', done => {
|
||||
it('should return an error if invalid group '
|
||||
+ 'uri provided in ACLs set out in request body', done => {
|
||||
const acp = new AccessControlPolicy(defaultAcpParams);
|
||||
acp.addGrantee('Group', 'http://acs.amazonaws.com/groups/' +
|
||||
'global/NOTAVALIDGROUP', 'WRITE_ACP');
|
||||
acp.addGrantee('Group', 'http://acs.amazonaws.com/groups/'
|
||||
+ 'global/NOTAVALIDGROUP', 'WRITE_ACP');
|
||||
const testObjACLRequest = {
|
||||
bucketName,
|
||||
namespace,
|
||||
|
@ -422,6 +435,7 @@ describe('putObjectACL API', () => {
|
|||
url: `/${bucketName}/${objectName}?acl`,
|
||||
post: [Buffer.from(acp.getXml(), 'utf8')],
|
||||
query: { acl: '' },
|
||||
actionImplicitDenies: false,
|
||||
};
|
||||
|
||||
bucketPut(authInfo, testPutBucketRequest, log, () => {
|
||||
|
@ -436,8 +450,8 @@ describe('putObjectACL API', () => {
|
|||
});
|
||||
});
|
||||
|
||||
it('should return an error if invalid group uri ' +
|
||||
'provided in ACL header request', done => {
|
||||
it('should return an error if invalid group uri '
|
||||
+ 'provided in ACL header request', done => {
|
||||
const testObjACLRequest = {
|
||||
bucketName,
|
||||
namespace,
|
||||
|
@ -445,11 +459,12 @@ describe('putObjectACL API', () => {
|
|||
headers: {
|
||||
'host': 's3.amazonaws.com',
|
||||
'x-amz-grant-full-control':
|
||||
'uri="http://acs.amazonaws.com/groups/' +
|
||||
'global/NOTAVALIDGROUP"',
|
||||
'uri="http://acs.amazonaws.com/groups/'
|
||||
+ 'global/NOTAVALIDGROUP"',
|
||||
},
|
||||
url: `/${bucketName}/${objectName}?acl`,
|
||||
query: { acl: '' },
|
||||
actionImplicitDenies: false,
|
||||
};
|
||||
|
||||
bucketPut(authInfo, testPutBucketRequest, log, () => {
|
||||
|
|
|
@ -19,6 +19,7 @@ const putBucketRequest = {
bucketName,
headers: { host: `${bucketName}.s3.amazonaws.com` },
url: '/',
actionImplicitDenies: false,
};

const putObjectRequest = new DummyRequest({

@ -29,16 +30,17 @@ const putObjectRequest = new DummyRequest({
url: `/${bucketName}/${objectName}`,
}, postBody);

const objectLegalHoldXml = status => '<LegalHold ' +
'xmlns="http://s3.amazonaws.com/doc/2006-03-01/">' +
`<Status>${status}</Status>` +
'</LegalHold>';
const objectLegalHoldXml = status => '<LegalHold '
+ 'xmlns="http://s3.amazonaws.com/doc/2006-03-01/">'
+ `<Status>${status}</Status>`
+ '</LegalHold>';

const putLegalHoldReq = status => ({
bucketName,
objectKey: objectName,
headers: { host: `${bucketName}.s3.amazonaws.com` },
post: objectLegalHoldXml(status),
actionImplicitDenies: false,
});

describe('putObjectLegalHold API', () => {

@ -77,11 +79,11 @@ describe('putObjectLegalHold API', () => {
objectPutLegalHold(authInfo, putLegalHoldReq('ON'), log, err => {
assert.ifError(err);
return metadata.getObjectMD(bucketName, objectName, {}, log,
(err, objMD) => {
assert.ifError(err);
assert.strictEqual(objMD.legalHold, true);
return done();
});
(err, objMD) => {
assert.ifError(err);
assert.strictEqual(objMD.legalHold, true);
return done();
});
});
});

@ -89,11 +91,11 @@ describe('putObjectLegalHold API', () => {
objectPutLegalHold(authInfo, putLegalHoldReq('OFF'), log, err => {
assert.ifError(err);
return metadata.getObjectMD(bucketName, objectName, {}, log,
(err, objMD) => {
assert.ifError(err);
assert.strictEqual(objMD.legalHold, false);
return done();
});
(err, objMD) => {
assert.ifError(err);
assert.strictEqual(objMD.legalHold, false);
return done();
});
});
});
});

@ -23,6 +23,7 @@ const bucketPutRequest = {
|
|||
bucketName,
|
||||
headers: { host: `${bucketName}.s3.amazonaws.com` },
|
||||
url: '/',
|
||||
actionImplicitDenies: false,
|
||||
};
|
||||
|
||||
const putObjectRequest = new DummyRequest({
|
||||
|
@ -33,41 +34,42 @@ const putObjectRequest = new DummyRequest({
|
|||
url: `/${bucketName}/${objectName}`,
|
||||
}, postBody);
|
||||
|
||||
const objectRetentionXmlGovernance = '<Retention ' +
|
||||
'xmlns="http://s3.amazonaws.com/doc/2006-03-01/">' +
|
||||
'<Mode>GOVERNANCE</Mode>' +
|
||||
`<RetainUntilDate>${expectedDate}</RetainUntilDate>` +
|
||||
'</Retention>';
|
||||
const objectRetentionXmlGovernance = '<Retention '
|
||||
+ 'xmlns="http://s3.amazonaws.com/doc/2006-03-01/">'
|
||||
+ '<Mode>GOVERNANCE</Mode>'
|
||||
+ `<RetainUntilDate>${expectedDate}</RetainUntilDate>`
|
||||
+ '</Retention>';
|
||||
|
||||
const objectRetentionXmlCompliance = '<Retention ' +
|
||||
'xmlns="http://s3.amazonaws.com/doc/2006-03-01/">' +
|
||||
'<Mode>COMPLIANCE</Mode>' +
|
||||
`<RetainUntilDate>${expectedDate}</RetainUntilDate>` +
|
||||
'</Retention>';
|
||||
const objectRetentionXmlCompliance = '<Retention '
|
||||
+ 'xmlns="http://s3.amazonaws.com/doc/2006-03-01/">'
|
||||
+ '<Mode>COMPLIANCE</Mode>'
|
||||
+ `<RetainUntilDate>${expectedDate}</RetainUntilDate>`
|
||||
+ '</Retention>';
|
||||
|
||||
const objectRetentionXmlGovernanceLonger = '<Retention ' +
|
||||
'xmlns="http://s3.amazonaws.com/doc/2006-03-01/">' +
|
||||
'<Mode>GOVERNANCE</Mode>' +
|
||||
`<RetainUntilDate>${moment().add(5, 'days').toISOString()}</RetainUntilDate>` +
|
||||
'</Retention>';
|
||||
const objectRetentionXmlGovernanceLonger = '<Retention '
|
||||
+ 'xmlns="http://s3.amazonaws.com/doc/2006-03-01/">'
|
||||
+ '<Mode>GOVERNANCE</Mode>'
|
||||
+ `<RetainUntilDate>${moment().add(5, 'days').toISOString()}</RetainUntilDate>`
|
||||
+ '</Retention>';
|
||||
|
||||
const objectRetentionXmlGovernanceShorter = '<Retention ' +
|
||||
'xmlns="http://s3.amazonaws.com/doc/2006-03-01/">' +
|
||||
'<Mode>GOVERNANCE</Mode>' +
|
||||
`<RetainUntilDate>${moment().add(1, 'days').toISOString()}</RetainUntilDate>` +
|
||||
'</Retention>';
|
||||
const objectRetentionXmlGovernanceShorter = '<Retention '
|
||||
+ 'xmlns="http://s3.amazonaws.com/doc/2006-03-01/">'
|
||||
+ '<Mode>GOVERNANCE</Mode>'
|
||||
+ `<RetainUntilDate>${moment().add(1, 'days').toISOString()}</RetainUntilDate>`
|
||||
+ '</Retention>';
|
||||
|
||||
const objectRetentionXmlComplianceShorter = '<Retention ' +
|
||||
'xmlns="http://s3.amazonaws.com/doc/2006-03-01/">' +
|
||||
'<Mode>COMPLIANCE</Mode>' +
|
||||
`<RetainUntilDate>${moment().add(1, 'days').toISOString()}</RetainUntilDate>` +
|
||||
'</Retention>';
|
||||
const objectRetentionXmlComplianceShorter = '<Retention '
|
||||
+ 'xmlns="http://s3.amazonaws.com/doc/2006-03-01/">'
|
||||
+ '<Mode>COMPLIANCE</Mode>'
|
||||
+ `<RetainUntilDate>${moment().add(1, 'days').toISOString()}</RetainUntilDate>`
|
||||
+ '</Retention>';
|
||||
|
||||
const putObjRetRequestGovernance = {
|
||||
bucketName,
|
||||
objectKey: objectName,
|
||||
headers: { host: `${bucketName}.s3.amazonaws.com` },
|
||||
post: objectRetentionXmlGovernance,
|
||||
actionImplicitDenies: false,
|
||||
};
|
||||
|
||||
const putObjRetRequestGovernanceWithHeader = {
|
||||
|
@ -78,6 +80,7 @@ const putObjRetRequestGovernanceWithHeader = {
|
|||
'x-amz-bypass-governance-retention': 'true',
|
||||
},
|
||||
post: objectRetentionXmlGovernance,
|
||||
actionImplicitDenies: false,
|
||||
};
|
||||
|
||||
const putObjRetRequestCompliance = {
|
||||
|
@ -85,6 +88,7 @@ const putObjRetRequestCompliance = {
|
|||
objectKey: objectName,
|
||||
headers: { host: `${bucketName}.s3.amazonaws.com` },
|
||||
post: objectRetentionXmlCompliance,
|
||||
actionImplicitDenies: false,
|
||||
};
|
||||
|
||||
const putObjRetRequestComplianceShorter = {
|
||||
|
@ -92,6 +96,7 @@ const putObjRetRequestComplianceShorter = {
|
|||
objectKey: objectName,
|
||||
headers: { host: `${bucketName}.s3.amazonaws.com` },
|
||||
post: objectRetentionXmlComplianceShorter,
|
||||
actionImplicitDenies: false,
|
||||
};
|
||||
|
||||
const putObjRetRequestGovernanceLonger = {
|
||||
|
@ -99,6 +104,7 @@ const putObjRetRequestGovernanceLonger = {
|
|||
objectKey: objectName,
|
||||
headers: { host: `${bucketName}.s3.amazonaws.com` },
|
||||
post: objectRetentionXmlGovernanceLonger,
|
||||
actionImplicitDenies: false,
|
||||
};
|
||||
|
||||
const putObjRetRequestGovernanceShorter = {
|
||||
|
@ -106,6 +112,7 @@ const putObjRetRequestGovernanceShorter = {
|
|||
objectKey: objectName,
|
||||
headers: { host: `${bucketName}.s3.amazonaws.com` },
|
||||
post: objectRetentionXmlGovernanceShorter,
|
||||
actionImplicitDenies: false,
|
||||
};
|
||||
|
||||
describe('putObjectRetention API', () => {
|
||||
|
@ -144,12 +151,12 @@ describe('putObjectRetention API', () => {
|
|||
objectPutRetention(authInfo, putObjRetRequestGovernance, log, err => {
|
||||
assert.ifError(err);
|
||||
return metadata.getObjectMD(bucketName, objectName, {}, log,
|
||||
(err, objMD) => {
|
||||
assert.ifError(err);
|
||||
assert.strictEqual(objMD.retentionMode, expectedMode);
|
||||
assert.strictEqual(objMD.retentionDate, expectedDate);
|
||||
return done();
|
||||
});
|
||||
(err, objMD) => {
|
||||
assert.ifError(err);
|
||||
assert.strictEqual(objMD.retentionMode, expectedMode);
|
||||
assert.strictEqual(objMD.retentionDate, expectedDate);
|
||||
return done();
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
|
|
|
@ -3,16 +3,15 @@ const assert = require('assert');
const { bucketPut } = require('../../../lib/api/bucketPut');
const objectPut = require('../../../lib/api/objectPut');
const objectPutTagging = require('../../../lib/api/objectPutTagging');
const { _validator, parseTagXml }
= require('arsenal').s3middleware.tagging;
const { cleanup,
const { _validator, parseTagXml } = require('arsenal').s3middleware.tagging;
const {
cleanup,
DummyRequestLogger,
makeAuthInfo,
TaggingConfigTester }
= require('../helpers');
TaggingConfigTester,
} = require('../helpers');
const metadata = require('../../../lib/metadata/wrapper');
const { taggingTests }
= require('../../functional/aws-node-sdk/lib/utility/tagging.js');
const { taggingTests } = require('../../functional/aws-node-sdk/lib/utility/tagging.js');
const DummyRequest = require('../DummyRequest');

const log = new DummyRequestLogger();
@ -25,6 +24,7 @@ const testBucketPutRequest = {
|
|||
bucketName,
|
||||
headers: { host: `${bucketName}.s3.amazonaws.com` },
|
||||
url: '/',
|
||||
actionImplicitDenies: false,
|
||||
};
|
||||
|
||||
const testPutObjectRequest = new DummyRequest({
|
||||
|
@ -42,14 +42,14 @@ function _checkError(err, code, errorName) {
}

function _generateSampleXml(key, value) {
const xml = '<Tagging>' +
'<TagSet>' +
'<Tag>' +
`<Key>${key}</Key>` +
`<Value>${value}</Value>` +
'</Tag>' +
'</TagSet>' +
'</Tagging>';
const xml = '<Tagging>'
+ '<TagSet>'
+ '<Tag>'
+ `<Key>${key}</Key>`
+ `<Value>${value}</Value>`
+ '</Tag>'
+ '</TagSet>'
+ '</Tagging>';

return xml;
}
@ -62,7 +62,7 @@ describe('putObjectTagging API', () => {
|
|||
return done(err);
|
||||
}
|
||||
return objectPut(authInfo, testPutObjectRequest, undefined, log,
|
||||
done);
|
||||
done);
|
||||
});
|
||||
});
|
||||
|
||||
|
@ -78,16 +78,16 @@ describe('putObjectTagging API', () => {
|
|||
return done(err);
|
||||
}
|
||||
return metadata.getObjectMD(bucketName, objectName, {}, log,
|
||||
(err, objectMD) => {
|
||||
if (err) {
|
||||
process.stdout.write(`Err retrieving object MD ${err}`);
|
||||
return done(err);
|
||||
}
|
||||
const uploadedTags = objectMD.tags;
|
||||
assert.deepStrictEqual(uploadedTags, taggingUtil.getTags());
|
||||
assert.strictEqual(objectMD.originOp, 's3:ObjectTagging:Put');
|
||||
return done();
|
||||
});
|
||||
(err, objectMD) => {
|
||||
if (err) {
|
||||
process.stdout.write(`Err retrieving object MD ${err}`);
|
||||
return done(err);
|
||||
}
|
||||
const uploadedTags = objectMD.tags;
|
||||
assert.deepStrictEqual(uploadedTags, taggingUtil.getTags());
|
||||
assert.strictEqual(objectMD.originOp, 's3:ObjectTagging:Put');
|
||||
return done();
|
||||
});
|
||||
});
|
||||
});
|
||||
});
|
||||
|
@ -95,55 +95,101 @@ describe('putObjectTagging API', () => {
|
|||
describe('PUT object tagging :: helper validation functions ', () => {
|
||||
describe('validateTagStructure ', () => {
|
||||
it('should return expected true if tag is valid false/undefined if not',
|
||||
done => {
|
||||
const tags = [
|
||||
{ tagTest: { Key: ['foo'], Value: ['bar'] }, isValid: true },
|
||||
{ tagTest: { Key: ['foo'] }, isValid: false },
|
||||
{ tagTest: { Value: ['bar'] }, isValid: false },
|
||||
{ tagTest: { Keys: ['foo'], Value: ['bar'] }, isValid: false },
|
||||
{ tagTest: { Key: ['foo', 'boo'], Value: ['bar'] },
|
||||
isValid: false },
|
||||
{ tagTest: { Key: ['foo'], Value: ['bar', 'boo'] },
|
||||
isValid: false },
|
||||
{ tagTest: { Key: ['foo', 'boo'], Value: ['bar', 'boo'] },
|
||||
isValid: false },
|
||||
{ tagTest: { Key: ['foo'], Values: ['bar'] }, isValid: false },
|
||||
{ tagTest: { Keys: ['foo'], Values: ['bar'] }, isValid: false },
|
||||
];
|
||||
done => {
|
||||
const tags = [
|
||||
{ tagTest: { Key: ['foo'], Value: ['bar'] }, isValid: true },
|
||||
{ tagTest: { Key: ['foo'] }, isValid: false },
|
||||
{ tagTest: { Value: ['bar'] }, isValid: false },
|
||||
{ tagTest: { Keys: ['foo'], Value: ['bar'] }, isValid: false },
|
||||
{
|
||||
tagTest: { Key: ['foo', 'boo'], Value: ['bar'] },
|
||||
isValid: false,
|
||||
},
|
||||
{
|
||||
tagTest: { Key: ['foo'], Value: ['bar', 'boo'] },
|
||||
isValid: false,
|
||||
},
|
||||
{
|
||||
tagTest: { Key: ['foo', 'boo'], Value: ['bar', 'boo'] },
|
||||
isValid: false,
|
||||
},
|
||||
{ tagTest: { Key: ['foo'], Values: ['bar'] }, isValid: false },
|
||||
{ tagTest: { Keys: ['foo'], Values: ['bar'] }, isValid: false },
|
||||
];
|
||||
|
||||
for (let i = 0; i < tags.length; i++) {
|
||||
const tag = tags[i];
|
||||
const result = _validator.validateTagStructure(tag.tagTest);
|
||||
if (tag.isValid) {
|
||||
assert(result);
|
||||
} else {
|
||||
assert(!result);
|
||||
for (let i = 0; i < tags.length; i++) {
|
||||
const tag = tags[i];
|
||||
const result = _validator.validateTagStructure(tag.tagTest);
|
||||
if (tag.isValid) {
|
||||
assert(result);
|
||||
} else {
|
||||
assert(!result);
|
||||
}
|
||||
}
|
||||
}
|
||||
done();
|
||||
});
|
||||
done();
|
||||
});
|
||||
|
||||
describe('validateXMLStructure ', () => {
|
||||
it('should return expected true if tag is valid false/undefined ' +
|
||||
'if not', done => {
|
||||
it('should return expected true if tag is valid false/undefined '
|
||||
+ 'if not', done => {
|
||||
const tags = [
|
||||
{ tagging: { Tagging: { TagSet: [{ Tag: [] }] } }, isValid:
|
||||
true },
|
||||
{
|
||||
tagging: { Tagging: { TagSet: [{ Tag: [] }] } },
|
||||
isValid:
|
||||
true,
|
||||
},
|
||||
{ tagging: { Tagging: { TagSet: [''] } }, isValid: true },
|
||||
{ tagging: { Tagging: { TagSet: [] } }, isValid: false },
|
||||
{ tagging: { Tagging: { TagSet: [{}] } }, isValid: false },
|
||||
{ tagging: { Tagging: { Tagset: [{ Tag: [] }] } }, isValid:
|
||||
false },
|
||||
{ tagging: { Tagging: { Tagset: [{ Tag: [] }] },
|
||||
ExtraTagging: 'extratagging' }, isValid: false },
|
||||
{ tagging: { Tagging: { Tagset: [{ Tag: [] }], ExtraTagset:
|
||||
'extratagset' } }, isValid: false },
|
||||
{ tagging: { Tagging: { Tagset: [{ Tag: [] }], ExtraTagset:
|
||||
'extratagset' } }, isValid: false },
|
||||
{ tagging: { Tagging: { Tagset: [{ Tag: [], ExtraTag:
|
||||
'extratag' }] } }, isValid: false },
|
||||
{ tagging: { Tagging: { Tagset: [{ Tag: {} }] } }, isValid:
|
||||
false },
|
||||
{
|
||||
tagging: { Tagging: { Tagset: [{ Tag: [] }] } },
|
||||
isValid:
|
||||
false,
|
||||
},
|
||||
{
|
||||
tagging: {
|
||||
Tagging: { Tagset: [{ Tag: [] }] },
|
||||
ExtraTagging: 'extratagging',
|
||||
},
|
||||
isValid: false,
|
||||
},
|
||||
{
|
||||
tagging: {
|
||||
Tagging: {
|
||||
Tagset: [{ Tag: [] }],
|
||||
ExtraTagset:
|
||||
'extratagset',
|
||||
},
|
||||
},
|
||||
isValid: false,
|
||||
},
|
||||
{
|
||||
tagging: {
|
||||
Tagging: {
|
||||
Tagset: [{ Tag: [] }],
|
||||
ExtraTagset:
|
||||
'extratagset',
|
||||
},
|
||||
},
|
||||
isValid: false,
|
||||
},
|
||||
{
|
||||
tagging: {
|
||||
Tagging: {
|
||||
Tagset: [{
|
||||
Tag: [],
|
||||
ExtraTag:
|
||||
'extratag',
|
||||
}],
|
||||
},
|
||||
},
|
||||
isValid: false,
|
||||
},
|
||||
{
|
||||
tagging: { Tagging: { Tagset: [{ Tag: {} }] } },
|
||||
isValid:
|
||||
false,
|
||||
},
|
||||
];
|
||||
|
||||
for (let i = 0; i < tags.length; i++) {
|
||||
|
@ -172,8 +218,8 @@ describe('PUT object tagging :: helper validation functions ', () => {
|
|||
|
||||
taggingTests.forEach(taggingTest => {
|
||||
it(taggingTest.it, done => {
|
||||
const key = taggingTest.tag.key;
|
||||
const value = taggingTest.tag.value;
|
||||
const { key } = taggingTest.tag;
|
||||
const { value } = taggingTest.tag;
|
||||
const xml = _generateSampleXml(key, value);
|
||||
parseTagXml(xml, log, (err, result) => {
|
||||
if (taggingTest.error) {
|
||||
|
|
|
@ -24,6 +24,7 @@ describe('serviceGet API', () => {
parsedHost: 's3.amazonaws.com',
headers: { host: 's3.amazonaws.com' },
url: '/',
actionImplicitDenies: false,
};

it('should return the list of buckets owned by the user', done => {

@ -0,0 +1,317 @@
|
|||
const assert = require('assert');
|
||||
const { checkBucketAcls, checkObjectAcls } = require('../../../lib/api/apiUtils/authorization/permissionChecks');
|
||||
const constants = require('../../../constants');
|
||||
|
||||
const { bucketOwnerActions, logId } = constants;
|
||||
|
||||
describe('checkBucketAcls', () => {
|
||||
const mockBucket = {
|
||||
getOwner: () => 'ownerId',
|
||||
getAcl: () => ({
|
||||
Canned: '',
|
||||
FULL_CONTROL: [],
|
||||
READ: [],
|
||||
READ_ACP: [],
|
||||
WRITE: [],
|
||||
WRITE_ACP: [],
|
||||
}),
|
||||
};
|
||||
|
||||
const testScenarios = [
|
||||
{
|
||||
description: 'should return true if bucket owner matches canonicalID',
|
||||
input: {
|
||||
bucketAcl: {}, requestType: 'anyType', canonicalID: 'ownerId', mainApiCall: 'anyApiCall',
|
||||
},
|
||||
expected: true,
|
||||
},
|
||||
{
|
||||
description: 'should return true for objectGetTagging when mainApiCall is objectGet',
|
||||
input: {
|
||||
bucketAcl: {}, requestType: 'objectGetTagging', canonicalID: 'anyId', mainApiCall: 'objectGet',
|
||||
},
|
||||
expected: true,
|
||||
},
|
||||
{
|
||||
description: 'should return true for objectPutTagging when mainApiCall is objectPut',
|
||||
input: {
|
||||
bucketAcl: {}, requestType: 'objectPutTagging', canonicalID: 'anyId', mainApiCall: 'objectPut',
|
||||
},
|
||||
expected: true,
|
||||
},
|
||||
{
|
||||
description: 'should return true for objectPutLegalHold when mainApiCall is objectPut',
|
||||
input: {
|
||||
bucketAcl: {}, requestType: 'objectPutLegalHold', canonicalID: 'anyId', mainApiCall: 'objectPut',
|
||||
},
|
||||
expected: true,
|
||||
},
|
||||
{
|
||||
description: 'should return true for objectPutRetention when mainApiCall is objectPut',
|
||||
input: {
|
||||
bucketAcl: {}, requestType: 'objectPutRetention', canonicalID: 'anyId', mainApiCall: 'objectPut',
|
||||
},
|
||||
expected: true,
|
||||
},
|
||||
{
|
||||
description: 'should return true for bucketGet if canned acl is public-read-write',
|
||||
input: {
|
||||
bucketAcl: { Canned: 'public-read-write' },
|
||||
requestType: 'bucketGet',
|
||||
canonicalID: 'anyId',
|
||||
mainApiCall: 'anyApiCall',
|
||||
},
|
||||
expected: true,
|
||||
},
|
||||
{
|
||||
description: 'should return true for bucketGet if canned acl is authenticated-read and id is not publicId',
|
||||
input: {
|
||||
bucketAcl: { Canned: 'authenticated-read' },
|
||||
requestType: 'bucketGet',
|
||||
canonicalID: 'anyIdNotPublic',
|
||||
mainApiCall: 'anyApiCall',
|
||||
},
|
||||
expected: true,
|
||||
},
|
||||
{
|
||||
description: 'should return true for bucketHead if canned acl is public-read',
|
||||
input: {
|
||||
bucketAcl: { Canned: 'public-read' },
|
||||
requestType: 'bucketHead',
|
||||
canonicalID: 'anyId',
|
||||
mainApiCall: 'anyApiCall',
|
||||
},
|
||||
expected: true,
|
||||
},
|
||||
{
|
||||
description: 'should return false for bucketPut even if canonicalID has FULL_CONTROL and write access ',
|
||||
input: {
|
||||
bucketAcl: {
|
||||
FULL_CONTROL: ['anyId'],
|
||||
WRITE: ['anyId'],
|
||||
},
|
||||
requestType: 'bucketPut',
|
||||
canonicalID: 'anyId',
|
||||
mainApiCall: 'anyApiCall',
|
||||
},
|
||||
expected: false,
|
||||
},
|
||||
{
|
||||
description: 'should return true for log-delivery-write ACL when canonicalID matches logId',
|
||||
input: {
|
||||
bucketAcl: { Canned: 'log-delivery-write' },
|
||||
requestType: 'bucketGetACL',
|
||||
canonicalID: logId,
|
||||
mainApiCall: 'anyApiCall',
|
||||
},
|
||||
expected: true,
|
||||
},
|
||||
{
|
||||
description: 'should return false when the canonicalID is not the owner and has no ACL permissions',
|
||||
input: {
|
||||
bucketAcl: {
|
||||
FULL_CONTROL: ['someOtherId'],
|
||||
WRITE: ['someOtherId'],
|
||||
},
|
||||
requestType: 'objectPut',
|
||||
canonicalID: 'anyId',
|
||||
mainApiCall: 'anyApiCall',
|
||||
},
|
||||
expected: false,
|
||||
},
|
||||
{
|
||||
description: 'should return false for bucketPutACL if canonicalID does not have ACL permissions',
|
||||
input: {
|
||||
bucketAcl: {
|
||||
FULL_CONTROL: ['someOtherId'],
|
||||
WRITE_ACP: ['someOtherId'],
|
||||
},
|
||||
requestType: 'bucketPutACL',
|
||||
canonicalID: 'anyId',
|
||||
mainApiCall: 'anyApiCall',
|
||||
},
|
||||
expected: false,
|
||||
},
|
||||
{
|
||||
description: 'should return true for bucketGet if canonicalID has FULL_CONTROL access',
|
||||
input: {
|
||||
bucketAcl: { FULL_CONTROL: ['anyId'], READ: [] },
|
||||
requestType: 'bucketGet',
|
||||
canonicalID: 'anyId',
|
||||
mainApiCall: 'anyApiCall',
|
||||
},
|
||||
expected: true,
|
||||
},
|
||||
{
|
||||
description: 'should return true for bucketGetACL if canonicalID has FULL_CONTROL',
|
||||
input: {
|
||||
bucketAcl: { FULL_CONTROL: ['anyId'], READ_ACP: [] },
|
||||
requestType: 'bucketGetACL',
|
||||
canonicalID: 'anyId',
|
||||
mainApiCall: 'anyApiCall',
|
||||
},
|
||||
expected: true,
|
||||
},
|
||||
{
|
||||
description: 'should return true for objectDelete if bucketAcl.Canned is public-read-write',
|
||||
input: {
|
||||
bucketAcl: { Canned: 'public-read-write' },
|
||||
requestType: 'objectDelete',
|
||||
canonicalID: 'anyId',
|
||||
mainApiCall: 'anyApiCall',
|
||||
},
|
||||
expected: true,
|
||||
},
|
||||
{
|
||||
description: 'should return true for requestType ending with "Version"',
|
||||
input: {
|
||||
bucketAcl: {},
|
||||
requestType: 'objectGetVersion',
|
||||
canonicalID: 'anyId',
|
||||
mainApiCall: 'objectGet',
|
||||
},
|
||||
expected: true,
|
||||
},
|
||||
{
|
||||
description: 'should return true for objectPutACL',
|
||||
input: {
|
||||
bucketAcl: {},
|
||||
requestType: 'objectPutACL',
|
||||
canonicalID: 'anyId',
|
||||
mainApiCall: 'anyApiCall',
|
||||
},
|
||||
expected: true,
|
||||
},
|
||||
{
|
||||
description: 'should return true for objectGetACL',
|
||||
input: {
|
||||
bucketAcl: {},
|
||||
requestType: 'objectGetACL',
|
||||
canonicalID: 'anyId',
|
||||
mainApiCall: 'anyApiCall',
|
||||
},
|
||||
expected: true,
|
||||
},
|
||||
{
|
||||
description: 'should return false for unmatched scenarios',
|
||||
input: {
|
||||
bucketAcl: {},
|
||||
requestType: 'unmatchedRequest',
|
||||
canonicalID: 'anyId',
|
||||
mainApiCall: 'anyApiCall',
|
||||
},
|
||||
expected: false,
|
||||
},
|
||||
];
|
||||
|
||||
testScenarios.forEach(scenario => {
|
||||
it(scenario.description, () => {
|
||||
// Mock the bucket based on the test scenario's input
|
||||
mockBucket.getAcl = () => scenario.input.bucketAcl;
|
||||
|
||||
const result = checkBucketAcls(mockBucket,
|
||||
scenario.input.requestType, scenario.input.canonicalID, scenario.input.mainApiCall);
|
||||
assert.strictEqual(result, scenario.expected);
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
describe('checkObjectAcls', () => {
|
||||
const mockBucket = {
|
||||
getOwner: () => 'bucketOwnerId',
|
||||
getName: () => 'bucketName',
|
||||
getAcl: () => ({ Canned: '' }),
|
||||
};
|
||||
const mockObjectMD = {
|
||||
'owner-id': 'objectOwnerId',
|
||||
'acl': {
|
||||
Canned: '',
|
||||
FULL_CONTROL: [],
|
||||
READ: [],
|
||||
READ_ACP: [],
|
||||
WRITE: [],
|
||||
WRITE_ACP: [],
|
||||
},
|
||||
};
|
||||
|
||||
it('should return true if request type is in bucketOwnerActions and bucket owner matches canonicalID', () => {
|
||||
assert.strictEqual(checkObjectAcls(mockBucket, mockObjectMD, bucketOwnerActions[0],
|
||||
'bucketOwnerId', false, false, 'anyApiCall'), true);
|
||||
});
|
||||
|
||||
it('should return true if objectMD owner matches canonicalID', () => {
|
||||
assert.strictEqual(checkObjectAcls(mockBucket, mockObjectMD, 'anyType',
|
||||
'objectOwnerId', false, false, 'anyApiCall'), true);
|
||||
});
|
||||
|
||||
it('should return true for objectGetTagging when mainApiCall is objectGet and conditions met', () => {
|
||||
assert.strictEqual(checkObjectAcls(mockBucket, mockObjectMD, 'objectGetTagging',
|
||||
'anyIdNotPublic', true, true, 'objectGet'), true);
|
||||
});
|
||||
|
||||
it('should return false if no acl provided in objectMD', () => {
|
||||
const objMDWithoutAcl = Object.assign({}, mockObjectMD);
|
||||
delete objMDWithoutAcl.acl;
|
||||
assert.strictEqual(checkObjectAcls(mockBucket, objMDWithoutAcl, 'anyType',
|
||||
'anyId', false, false, 'anyApiCall'), false);
|
||||
});
|
||||
|
||||
const tests = [
|
||||
{
|
||||
acl: 'public-read', reqType: 'objectGet', id: 'anyIdNotPublic', expected: true,
|
||||
},
|
||||
{
|
||||
acl: 'public-read-write', reqType: 'objectGet', id: 'anyIdNotPublic', expected: true,
|
||||
},
|
||||
{
|
||||
acl: 'authenticated-read', reqType: 'objectGet', id: 'anyIdNotPublic', expected: true,
|
||||
},
|
||||
{
|
||||
acl: 'bucket-owner-read', reqType: 'objectGet', id: 'bucketOwnerId', expected: true,
|
||||
},
|
||||
{
|
||||
acl: 'bucket-owner-full-control', reqType: 'objectGet', id: 'bucketOwnerId', expected: true,
|
||||
},
|
||||
{
|
||||
aclList: ['someId', 'anyIdNotPublic'],
|
||||
aclField: 'FULL_CONTROL',
|
||||
reqType: 'objectGet',
|
||||
id: 'anyIdNotPublic',
|
||||
expected: true,
|
||||
},
|
||||
{
|
||||
aclList: ['someId', 'anyIdNotPublic'],
|
||||
aclField: 'READ',
|
||||
reqType: 'objectGet',
|
||||
id: 'anyIdNotPublic',
|
||||
expected: true,
|
||||
},
|
||||
{ reqType: 'objectPut', id: 'anyId', expected: true },
|
||||
{ reqType: 'objectDelete', id: 'anyId', expected: true },
|
||||
{
|
||||
aclList: ['anyId'], aclField: 'FULL_CONTROL', reqType: 'objectPutACL', id: 'anyId', expected: true,
|
||||
},
|
||||
{
|
||||
aclList: ['anyId'], aclField: 'FULL_CONTROL', reqType: 'objectGetACL', id: 'anyId', expected: true,
|
||||
},
|
||||
{
|
||||
acl: '', reqType: 'objectGet', id: 'randomId', expected: false,
|
||||
},
|
||||
];
|
||||
|
||||
tests.forEach(test => {
|
||||
it(`should return ${test.expected} for ${test.reqType} with ACL as ${test.acl
|
||||
|| (`${test.aclField}:${JSON.stringify(test.aclList)}`)}`, () => {
|
||||
if (test.acl) {
|
||||
mockObjectMD.acl.Canned = test.acl;
|
||||
} else if (test.aclList && test.aclField) {
|
||||
mockObjectMD.acl[test.aclField] = test.aclList;
|
||||
}
|
||||
|
||||
assert.strictEqual(
|
||||
checkObjectAcls(mockBucket, mockObjectMD, test.reqType, test.id, false, false, 'anyApiCall'),
|
||||
test.expected,
|
||||
);
|
||||
});
|
||||
});
|
||||
});
|
|
@ -340,6 +340,7 @@ class CorsConfigTester {
},
url: '/?cors',
query: { cors: '' },
actionImplicitDenies: false,
};
if (method === 'PUT') {
request.post = body || this.constructXml();

@ -381,6 +382,7 @@ const versioningTestUtils = {
},
url: '/?versioning',
query: { versioning: '' },
actionImplicitDenies: false,
};
const xml = '<VersioningConfiguration ' +
'xmlns="http://s3.amazonaws.com/doc/2006-03-01/">' +

@ -431,6 +433,7 @@ class TaggingConfigTester {
objectKey: objectName,
url: '/?tagging',
query: { tagging: '' },
actionImplicitDenies: false,
};
if (method === 'PUT') {
request.post = body || this.constructXml();

@ -0,0 +1,55 @@
const assert = require('assert');

const { models } = require('arsenal');
const { BucketInfo } = models;
const { DummyRequestLogger, makeAuthInfo } = require('../helpers');

const creationDate = new Date().toJSON();
const authInfo = makeAuthInfo('accessKey');
const otherAuthInfo = makeAuthInfo('otherAccessKey');
const ownerCanonicalId = authInfo.getCanonicalID();

const bucket = new BucketInfo('niftyBucket', ownerCanonicalId,
authInfo.getAccountDisplayName(), creationDate);
const log = new DummyRequestLogger();

const { validateBucket } = require('../../../lib/metadata/metadataUtils');

describe('validateBucket', () => {
it('action bucketPutPolicy by bucket owner', () => {
const validationResult = validateBucket(bucket, {
authInfo,
requestType: 'bucketPutPolicy',
request: null,
}, false, log);
assert.ifError(validationResult);
});
it('action bucketPutPolicy by other than bucket owner', () => {
const validationResult = validateBucket(bucket, {
authInfo: otherAuthInfo,
requestType: 'bucketPutPolicy',
request: null,
}, false, log);
assert(validationResult);
assert(validationResult.is.MethodNotAllowed);
});

it('action bucketGet by bucket owner', () => {
const validationResult = validateBucket(bucket, {
authInfo,
requestType: 'bucketGet',
request: null,
}, false, log);
assert.ifError(validationResult);
});

it('action bucketGet by other than bucket owner', () => {
const validationResult = validateBucket(bucket, {
authInfo: otherAuthInfo,
requestType: 'bucketGet',
request: null,
}, log, false);
assert(validationResult);
assert(validationResult.is.AccessDenied);
});
});

@ -488,9 +488,9 @@ arraybuffer.slice@~0.0.7:
optionalDependencies:
ioctl "^2.0.2"

"arsenal@git+https://github.com/scality/arsenal#7.10.48":
version "7.10.48"
resolved "git+https://github.com/scality/arsenal#f49cea3914390880008e3d41cedb1a02f9d99f39"
"arsenal@git+https://github.com/scality/arsenal#7.10.49":
version "7.10.49"
resolved "git+https://github.com/scality/arsenal#fbf5562a1180055249745881c1a324562d7cdc8a"
dependencies:
"@types/async" "^3.2.12"
"@types/utf8" "^3.0.1"