Compare commits

...

6 Commits

71 changed files with 2013 additions and 1422 deletions

View File

@@ -114,6 +114,7 @@ const api = {
         // no need to check auth on website or cors preflight requests
         if (apiMethod === 'websiteGet' || apiMethod === 'websiteHead' ||
             apiMethod === 'corsPreflight') {
+            request.iamAuthzResults = false;
             return this[apiMethod](request, log, callback);
         }
@@ -136,15 +137,25 @@ const api = {
         const requestContexts = prepareRequestContexts(apiMethod, request,
             sourceBucket, sourceObject, sourceVersionId);
+        // Extract all the _apiMethods and store them in an array
+        const apiMethods = requestContexts ? requestContexts.map(context => context._apiMethod) : [];
+        // Attach the names to the current request
+        // eslint-disable-next-line no-param-reassign
+        request.apiMethods = apiMethods;
         function checkAuthResults(authResults) {
             let returnTagCount = true;
+            const isImplicitDeny = {};
+            let isOnlyImplicitDeny = true;
             if (apiMethod === 'objectGet') {
                 // first item checks s3:GetObject(Version) action
-                if (!authResults[0].isAllowed) {
+                if (!authResults[0].isAllowed && !authResults[0].isImplicit) {
                     log.trace('get object authorization denial from Vault');
                     return errors.AccessDenied;
                 }
+                // TODO add support for returnTagCount in the bucket policy
+                // checks
+                isImplicitDeny[authResults[0].action] = authResults[0].isImplicit;
                 // second item checks s3:GetObject(Version)Tagging action
                 if (!authResults[1].isAllowed) {
                     log.trace('get tagging authorization denial ' +
@@ -153,13 +164,25 @@ const api = {
                 }
             } else {
                 for (let i = 0; i < authResults.length; i++) {
-                    if (!authResults[i].isAllowed) {
+                    isImplicitDeny[authResults[i].action] = true;
+                    if (!authResults[i].isAllowed && !authResults[i].isImplicit) {
+                        // Any explicit deny rejects the current API call
                         log.trace('authorization denial from Vault');
                         return errors.AccessDenied;
+                    } else if (authResults[i].isAllowed) {
+                        // If the action is allowed, the result is not implicit
+                        // Deny.
+                        isImplicitDeny[authResults[i].action] = false;
+                        isOnlyImplicitDeny = false;
                     }
                 }
             }
-            return returnTagCount;
+            // These two APIs cannot use ACLs or Bucket Policies, hence, any
+            // implicit deny from vault must be treated as an explicit deny.
+            if ((apiMethod === 'bucketPut' || apiMethod === 'serviceGet') && isOnlyImplicitDeny) {
+                return errors.AccessDenied;
+            }
+            return { returnTagCount, isImplicitDeny };
         }
         return async.waterfall([
@@ -237,7 +260,14 @@ const api = {
                 if (checkedResults instanceof Error) {
                     return callback(checkedResults);
                 }
-                returnTagCount = checkedResults;
+                returnTagCount = checkedResults.returnTagCount;
+                request.iamAuthzResults = checkedResults.isImplicitDeny;
+            } else {
+                // create an object of keys apiMethods with all values to false
+                request.iamAuthzResults = apiMethods.reduce((acc, curr) => {
+                    acc[curr] = false;
+                    return acc;
+                }, {});
             }
             if (apiMethod === 'objectPut' || apiMethod === 'objectPutPart') {
                 request._response = response;
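Note on the checkAuthResults change above: each Vault result now carries an isImplicit flag. Only an explicit deny fails the call immediately; implicit denies are recorded per action in request.iamAuthzResults so the ACL and bucket-policy layer can still grant access later. A minimal sketch of that folding, using a hypothetical authResults array (field names follow the diff, the sample values are illustrative only):

    // Hypothetical Vault response: the first action is explicitly allowed,
    // the second is only implicitly denied.
    const authResults = [
        { action: 'objectPut', isAllowed: true, isImplicit: false },
        { action: 'objectPutTagging', isAllowed: false, isImplicit: true },
    ];
    const isImplicitDeny = {};
    let isOnlyImplicitDeny = true;
    for (const res of authResults) {
        isImplicitDeny[res.action] = true;
        if (!res.isAllowed && !res.isImplicit) {
            throw new Error('AccessDenied'); // explicit deny short-circuits
        }
        if (res.isAllowed) {
            isImplicitDeny[res.action] = false;
            isOnlyImplicitDeny = false;
        }
    }
    // isImplicitDeny is now { objectPut: false, objectPutTagging: true } and
    // becomes request.iamAuthzResults for the later ACL/policy checks.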

View File

@@ -8,13 +8,32 @@ const { allAuthedUsersId, bucketOwnerActions, logId, publicId,
 const publicReadBuckets = process.env.ALLOW_PUBLIC_READ_BUCKETS ?
     process.env.ALLOW_PUBLIC_READ_BUCKETS.split(',') : [];
-function checkBucketAcls(bucket, requestType, canonicalID) {
+function checkBucketAcls(bucket, requestType, canonicalID, mainApiCall) {
+    // Same logic applies on the Versioned APIs, so let's simplify it.
+    const requestTypeParsed = requestType.endsWith('Version') ?
+        requestType.slice(0, -7) : requestType;
     if (bucket.getOwner() === canonicalID) {
         return true;
     }
+    // Backward compatibility
+    const arrayOfAllowed = [
+        'objectPutTagging',
+        'objectPutLegalHold',
+        'objectPutRetention',
+    ];
+    if (mainApiCall === 'objectGet') {
+        if (requestTypeParsed === 'objectGetTagging') {
+            return true;
+        }
+    }
+    if (mainApiCall === 'objectPut') {
+        if (arrayOfAllowed.includes(requestTypeParsed)) {
+            return true;
+        }
+    }
     const bucketAcl = bucket.getAcl();
-    if (requestType === 'bucketGet' || requestType === 'bucketHead') {
+    if (requestTypeParsed === 'bucketGet' || requestTypeParsed === 'bucketHead') {
         if (bucketAcl.Canned === 'public-read'
             || bucketAcl.Canned === 'public-read-write'
             || (bucketAcl.Canned === 'authenticated-read'
@@ -32,7 +51,7 @@ function checkBucketAcls(bucket, requestType, canonicalID) {
             return true;
         }
     }
-    if (requestType === 'bucketGetACL') {
+    if (requestTypeParsed === 'bucketGetACL') {
         if ((bucketAcl.Canned === 'log-delivery-write'
             && canonicalID === logId)
             || bucketAcl.FULL_CONTROL.indexOf(canonicalID) > -1
@@ -48,7 +67,7 @@ function checkBucketAcls(bucket, requestType, canonicalID) {
         }
     }
-    if (requestType === 'bucketPutACL') {
+    if (requestTypeParsed === 'bucketPutACL') {
         if (bucketAcl.FULL_CONTROL.indexOf(canonicalID) > -1
             || bucketAcl.WRITE_ACP.indexOf(canonicalID) > -1) {
             return true;
@@ -62,11 +81,7 @@ function checkBucketAcls(bucket, requestType, canonicalID) {
         }
     }
-    if (requestType === 'bucketDelete' && bucket.getOwner() === canonicalID) {
-        return true;
-    }
-    if (requestType === 'objectDelete' || requestType === 'objectPut') {
+    if (requestTypeParsed === 'objectDelete' || requestTypeParsed === 'objectPut') {
         if (bucketAcl.Canned === 'public-read-write'
             || bucketAcl.FULL_CONTROL.indexOf(canonicalID) > -1
             || bucketAcl.WRITE.indexOf(canonicalID) > -1) {
@@ -86,11 +101,12 @@ function checkBucketAcls(bucket, requestType, canonicalID) {
     // objectPutACL, objectGetACL, objectHead or objectGet, the bucket
     // authorization check should just return true so can move on to check
     // rights at the object level.
-    return (requestType === 'objectPutACL' || requestType === 'objectGetACL' ||
-        requestType === 'objectGet' || requestType === 'objectHead');
+    return (requestTypeParsed === 'objectPutACL' || requestTypeParsed === 'objectGetACL' ||
+        requestTypeParsed === 'objectGet' || requestTypeParsed === 'objectHead');
 }
-function checkObjectAcls(bucket, objectMD, requestType, canonicalID) {
+function checkObjectAcls(bucket, objectMD, requestType, canonicalID, requesterIsNotUser,
+    isUserUnauthenticated, mainApiCall) {
     const bucketOwner = bucket.getOwner();
     // acls don't distinguish between users and accounts, so both should be allowed
     if (bucketOwnerActions.includes(requestType)
@@ -100,6 +116,15 @@ function checkObjectAcls(bucket, objectMD, requestType, canonicalID) {
     if (objectMD['owner-id'] === canonicalID) {
         return true;
     }
+    // Backward compatibility
+    if (mainApiCall === 'objectGet') {
+        if ((isUserUnauthenticated || (requesterIsNotUser && bucketOwner === objectMD['owner-id']))
+            && requestType === 'objectGetTagging') {
+            return true;
+        }
+    }
     if (!objectMD.acl) {
         return false;
     }
@@ -268,7 +293,26 @@ function checkBucketPolicy(policy, requestType, canonicalID, arn, bucketOwner, l
     return permission;
 }
-function isBucketAuthorized(bucket, requestType, canonicalID, authInfo, log, request) {
+function isBucketAuthorizedNew(bucket, requestTypes, canonicalID, authInfo, iamAuthzResults, log, request) {
+    if (!Array.isArray(requestTypes)) {
+        // eslint-disable-next-line no-param-reassign
+        requestTypes = [requestTypes];
+    }
+    if (iamAuthzResults === false) {
+        // eslint-disable-next-line no-param-reassign
+        iamAuthzResults = {};
+    }
+    // By default, all missing actions are defined as allowed from IAM, to be
+    // backward compatible
+    requestTypes.forEach(requestType => {
+        if (iamAuthzResults[requestType] === undefined) {
+            // eslint-disable-next-line no-param-reassign
+            iamAuthzResults[requestType] = false;
+        }
+    });
+    const mainApiCall = requestTypes[0];
+    const results = {};
+    requestTypes.forEach(_requestType => {
     // Check to see if user is authorized to perform a
     // particular action on bucket based on ACLs.
     // TODO: Add IAM checks
@@ -280,63 +324,178 @@ function isBucketAuthorized(bucket, requestType, canonicalID, authInfo, log, req
     }
     // if the bucket owner is an account, users should not have default access
     if ((bucket.getOwner() === canonicalID) && requesterIsNotUser) {
-        return true;
+        results[_requestType] = iamAuthzResults[_requestType] === false;
+        return;
     }
-    const aclPermission = checkBucketAcls(bucket, requestType, canonicalID);
+    const aclPermission = checkBucketAcls(bucket, _requestType, canonicalID, mainApiCall);
     const bucketPolicy = bucket.getBucketPolicy();
     if (!bucketPolicy) {
-        return aclPermission;
+        results[_requestType] = iamAuthzResults[_requestType] === false && aclPermission;
+        return;
     }
-    const bucketPolicyPermission = checkBucketPolicy(bucketPolicy, requestType,
+    const bucketPolicyPermission = checkBucketPolicy(bucketPolicy, _requestType,
         canonicalID, arn, bucket.getOwner(), log, request);
     if (bucketPolicyPermission === 'explicitDeny') {
-        return false;
+        results[_requestType] = false;
+        return;
     }
-    return (aclPermission || (bucketPolicyPermission === 'allow'));
+    // If the bucket policy returns an allow, we accept the request, as the
+    // IAM response here is either Allow or implicit deny.
+    if (bucketPolicyPermission === 'allow') {
+        results[_requestType] = true;
+        return;
+    }
+    results[_requestType] = iamAuthzResults[_requestType] === false && aclPermission;
+    });
+    // final result is true if all the results are true
+    return Object.keys(results).every(key => results[key] === true);
 }
-function isObjAuthorized(bucket, objectMD, requestType, canonicalID, authInfo, log, request) {
+function isBucketAuthorized(bucket, requestType, canonicalID, authInfo, log, request) {
+    const iamAuthzResults = {};
+    const result = isBucketAuthorizedNew(bucket, [requestType], canonicalID, authInfo, iamAuthzResults, log, request);
+    return result;
+}
+function isObjAuthorizedNew(bucket, objectMD, requestTypes, canonicalID, authInfo, iamAuthzResults, log, request) {
+    if (!Array.isArray(requestTypes)) {
+        // eslint-disable-next-line no-param-reassign
+        requestTypes = [requestTypes];
+    }
+    // By default, all missing actions are defined as allowed from IAM, to be
+    // backward compatible
+    if (iamAuthzResults === false) {
+        // eslint-disable-next-line no-param-reassign
+        iamAuthzResults = {};
+    }
+    requestTypes.forEach(requestType => {
+        if (iamAuthzResults[requestType] === undefined) {
+            // eslint-disable-next-line no-param-reassign
+            iamAuthzResults[requestType] = false;
+        }
+    });
+    const results = {};
+    const mainApiCall = requestTypes[0];
+    requestTypes.forEach(_requestType => {
+        const parsedMethodName = _requestType.endsWith('Version') ?
+            _requestType.slice(0, -7) : _requestType;
     const bucketOwner = bucket.getOwner();
     if (!objectMD) {
         // User is already authorized on the bucket for FULL_CONTROL or WRITE or
         // bucket has canned ACL public-read-write
-        if (requestType === 'objectPut' || requestType === 'objectDelete') {
-            return true;
+        if (parsedMethodName === 'objectPut' || parsedMethodName === 'objectDelete') {
+            results[_requestType] = iamAuthzResults[_requestType] === false;
+            return;
         }
         // check bucket has read access
         // 'bucketGet' covers listObjects and listMultipartUploads, bucket read actions
-        return isBucketAuthorized(bucket, 'bucketGet', canonicalID, authInfo, log, request);
+        results[_requestType] = isBucketAuthorized(bucket, 'bucketGet', canonicalID, authInfo, false, log, request);
+        return;
     }
     let requesterIsNotUser = true;
     let arn = null;
+    let isUserUnauthenticated = false;
     if (authInfo) {
         requesterIsNotUser = !authInfo.isRequesterAnIAMUser();
         arn = authInfo.getArn();
+        isUserUnauthenticated = arn === undefined;
     }
     if (objectMD['owner-id'] === canonicalID && requesterIsNotUser) {
-        return true;
+        results[_requestType] = iamAuthzResults[_requestType] === false;
+        return;
     }
     // account is authorized if:
     // - requesttype is included in bucketOwnerActions and
     // - account is the bucket owner
     // - requester is account, not user
-    if (bucketOwnerActions.includes(requestType)
+    if (bucketOwnerActions.includes(parsedMethodName)
         && (bucketOwner === canonicalID)
         && requesterIsNotUser) {
-        return true;
+        results[_requestType] = iamAuthzResults[_requestType] === false;
+        return;
+    }
+    const aclPermission = checkObjectAcls(bucket, objectMD, parsedMethodName,
+        canonicalID, requesterIsNotUser, isUserUnauthenticated, mainApiCall);
+    const bucketPolicy = bucket.getBucketPolicy();
+    if (!bucketPolicy) {
+        results[_requestType] = iamAuthzResults[_requestType] === false && aclPermission;
+        return;
+    }
+    const bucketPolicyPermission = checkBucketPolicy(bucketPolicy, _requestType,
+        canonicalID, arn, bucket.getOwner(), log, request);
+    if (bucketPolicyPermission === 'explicitDeny') {
+        results[_requestType] = false;
+        return;
+    }
+    // If the bucket policy returns an allow, we accept the request, as the
+    // IAM response here is either Allow or implicit deny.
+    if (bucketPolicyPermission === 'allow') {
+        results[_requestType] = true;
+        return;
+    }
+    results[_requestType] = iamAuthzResults[_requestType] === false && aclPermission;
+    });
+    // final result is true if all the results are true
+    return Object.keys(results).every(key => results[key] === true);
+}
+function isObjAuthorized(bucket, objectMD, requestType, canonicalID, authInfo, log, request) {
+    const iamAuthzResults = {};
+    const result = isObjAuthorizedNew(bucket, objectMD,
+        [requestType], canonicalID, authInfo, iamAuthzResults, log, request);
+    return result;
+}
+function evaluateBucketPolicyWithIAM(bucket, requestTypes, canonicalID, authInfo, iamAuthzResults, log, request) {
+    if (!Array.isArray(requestTypes)) {
+        // eslint-disable-next-line no-param-reassign
+        requestTypes = [requestTypes];
+    }
+    if (iamAuthzResults === false) {
+        // eslint-disable-next-line no-param-reassign
+        iamAuthzResults = {};
+    }
+    // By default, all missing actions are defined as allowed from IAM, to be
+    // backward compatible
+    requestTypes.forEach(requestType => {
+        if (iamAuthzResults[requestType] === undefined) {
+            // eslint-disable-next-line no-param-reassign
+            iamAuthzResults[requestType] = false;
+        }
+    });
+    const results = {};
+    requestTypes.forEach(_requestType => {
+        let arn = null;
+        if (authInfo) {
+            arn = authInfo.getArn();
     }
-    const aclPermission = checkObjectAcls(bucket, objectMD, requestType,
-        canonicalID);
     const bucketPolicy = bucket.getBucketPolicy();
     if (!bucketPolicy) {
-        return aclPermission;
+        results[_requestType] = iamAuthzResults[_requestType] === false;
+        return;
     }
-    const bucketPolicyPermission = checkBucketPolicy(bucketPolicy, requestType,
+    const bucketPolicyPermission = checkBucketPolicy(bucketPolicy, _requestType,
         canonicalID, arn, bucket.getOwner(), log, request);
     if (bucketPolicyPermission === 'explicitDeny') {
-        return false;
+        results[_requestType] = false;
+        return;
     }
-    return (aclPermission || (bucketPolicyPermission === 'allow'));
+    // If the bucket policy returns an allow, we accept the request, as the
+    // IAM response here is either Allow or implicit deny.
+    if (bucketPolicyPermission === 'allow') {
+        results[_requestType] = true;
+        return;
+    }
+    results[_requestType] = iamAuthzResults[_requestType] === false;
+    });
+    // final result is true if all the results are true
+    return Object.keys(results).every(key => results[key] === true);
 }
@@ -395,4 +554,5 @@ module.exports = {
     checkObjectAcls,
     validatePolicyResource,
     isLifecycleSession,
+    evaluateBucketPolicyWithIAM,
 };
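The evaluation order implemented by isBucketAuthorizedNew and isObjAuthorizedNew above is: an explicit bucket-policy deny always rejects, a bucket-policy allow always grants (IAM can only have answered Allow or implicit deny at this point), and otherwise the action passes only if IAM did not implicitly deny it (iamAuthzResults[action] === false) and the ACLs allow it. A condensed sketch of that per-action precedence; the function and variable names here are illustrative, not part of the module:

    // policyVerdict is 'explicitDeny', 'allow' or anything else (no match);
    // iamImplicitDeny is the boolean stored in request.iamAuthzResults.
    function decideAction(policyVerdict, iamImplicitDeny, aclAllows) {
        if (policyVerdict === 'explicitDeny') {
            return false; // bucket policy deny overrides everything
        }
        if (policyVerdict === 'allow') {
            return true; // bucket policy allow overrides an IAM implicit deny
        }
        return !iamImplicitDeny && aclAllows; // fall back to IAM + ACLs
    }
    // The request succeeds only if every requested action succeeds, mirroring
    // the final Object.keys(results).every(...) check in the functions above.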

View File

@@ -18,10 +18,10 @@ function bucketDeleteLifecycle(authInfo, request, log, callback) {
     const metadataValParams = {
         authInfo,
         bucketName,
-        requestType: 'bucketDeleteLifecycle',
+        requestType: request.apiMethods || 'bucketDeleteLifecycle',
         request,
     };
-    return metadataValidateBucket(metadataValParams, log, (err, bucket) => {
+    return metadataValidateBucket(metadataValParams, request.iamAuthzResults, log, (err, bucket) => {
         const corsHeaders = collectCorsHeaders(headers.origin, method, bucket);
         if (err) {
             log.debug('error processing request', {
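The same two-line pattern repeats in most handlers below: requestType now prefers request.apiMethods (the full action list attached in api.js) over the single hard-coded name, and the metadata validation helper receives request.iamAuthzResults as an extra argument. Assuming the metadataValidateBucket signature of (params, iamAuthzResults, log, callback) used throughout this diff, a typical call now reads (sketch only):

    const metadataValParams = {
        authInfo,
        bucketName,
        // all actions resolved for this request, with the old name as fallback
        requestType: request.apiMethods || 'bucketDeleteLifecycle',
        request,
    };
    // the per-action implicit-deny map from api.js is threaded through
    return metadataValidateBucket(metadataValParams, request.iamAuthzResults, log,
        (err, bucket) => { /* ... */ });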

View File

@@ -328,7 +328,7 @@ function bucketGet(authInfo, request, log, callback) {
     const metadataValParams = {
         authInfo,
         bucketName,
-        requestType: 'bucketGet',
+        requestType: request.apiMethods || 'bucketGet',
         request,
     };
     const listParams = {
@@ -351,7 +351,7 @@ function bucketGet(authInfo, request, log, callback) {
         listParams.marker = params.marker;
     }
-    metadataValidateBucket(metadataValParams, log, (err, bucket) => {
+    metadataValidateBucket(metadataValParams, request.iamAuthzResults, log, (err, bucket) => {
         const corsHeaders = collectCorsHeaders(request.headers.origin,
             request.method, bucket);
         if (err) {

View File

@@ -44,7 +44,7 @@ function bucketGetACL(authInfo, request, log, callback) {
     const metadataValParams = {
         authInfo,
         bucketName,
-        requestType: 'bucketGetACL',
+        requestType: request.apiMethods || 'bucketGetACL',
         request,
     };
     const grantInfo = {
@@ -55,7 +55,7 @@ function bucketGetACL(authInfo, request, log, callback) {
         },
     };
-    metadataValidateBucket(metadataValParams, log, (err, bucket) => {
+    metadataValidateBucket(metadataValParams, request.iamAuthzResults, log, (err, bucket) => {
         const corsHeaders = collectCorsHeaders(request.headers.origin,
             request.method, bucket);
         if (err) {

View File

@@ -39,7 +39,8 @@ function bucketGetCors(authInfo, request, log, callback) {
         const corsHeaders = collectCorsHeaders(request.headers.origin,
             request.method, bucket);
-        if (!isBucketAuthorized(bucket, requestType, canonicalID, authInfo, log, request)) {
+        if (!isBucketAuthorized(bucket, request.apiMethods || requestType, canonicalID, authInfo,
+            request.iamAuthzResults, log, request)) {
             log.debug('access denied for user on bucket', {
                 requestType,
                 method: 'bucketGetCors',

View File

@@ -22,12 +22,12 @@ function bucketGetEncryption(authInfo, request, log, callback) {
     const metadataValParams = {
         authInfo,
         bucketName,
-        requestType: 'bucketGetEncryption',
+        requestType: request.apiMethods || 'bucketGetEncryption',
         request,
     };
     return async.waterfall([
-        next => metadataValidateBucket(metadataValParams, log, next),
+        next => metadataValidateBucket(metadataValParams, request.iamAuthzResults, log, next),
         (bucket, next) => checkExpectedBucketOwner(request.headers, bucket, log, err => next(err, bucket)),
         (bucket, next) => {
             // If sseInfo is present but the `mandatory` flag is not set

View File

@@ -21,10 +21,10 @@ function bucketGetLifecycle(authInfo, request, log, callback) {
     const metadataValParams = {
         authInfo,
         bucketName,
-        requestType: 'bucketGetLifecycle',
+        requestType: request.apiMethods || 'bucketGetLifecycle',
         request,
     };
-    return metadataValidateBucket(metadataValParams, log, (err, bucket) => {
+    return metadataValidateBucket(metadataValParams, request.iamAuthzResults, log, (err, bucket) => {
         const corsHeaders = collectCorsHeaders(headers.origin, method, bucket);
         if (err) {
             log.debug('error processing request', {

View File

@@ -41,7 +41,8 @@ function bucketGetLocation(authInfo, request, log, callback) {
         const corsHeaders = collectCorsHeaders(request.headers.origin,
             request.method, bucket);
-        if (!isBucketAuthorized(bucket, requestType, canonicalID, authInfo, log, request)) {
+        if (!isBucketAuthorized(bucket, request.apiMethods || requestType, canonicalID, authInfo,
+            request.iamAuthzResults, log, request)) {
             log.debug('access denied for account on bucket', {
                 requestType,
                 method: 'bucketGetLocation',

View File

@@ -37,11 +37,11 @@ function bucketGetNotification(authInfo, request, log, callback) {
     const metadataValParams = {
         authInfo,
         bucketName,
-        requestType: 'bucketGetNotification',
+        requestType: request.apiMethods || 'bucketGetNotification',
         request,
     };
-    return metadataValidateBucket(metadataValParams, log, (err, bucket) => {
+    return metadataValidateBucket(metadataValParams, request.iamAuthzResults, log, (err, bucket) => {
         const corsHeaders = collectCorsHeaders(headers.origin, method, bucket);
         if (err) {
             log.debug('error processing request', {

View File

@@ -33,10 +33,10 @@ function bucketGetObjectLock(authInfo, request, log, callback) {
     const metadataValParams = {
         authInfo,
         bucketName,
-        requestType: 'bucketGetObjectLock',
+        requestType: request.apiMethods || 'bucketGetObjectLock',
         request,
     };
-    return metadataValidateBucket(metadataValParams, log, (err, bucket) => {
+    return metadataValidateBucket(metadataValParams, request.iamAuthzResults, log, (err, bucket) => {
         const corsHeaders = collectCorsHeaders(headers.origin, method, bucket);
         if (err) {
             log.debug('error processing request', {

View File

@@ -17,11 +17,11 @@ function bucketGetPolicy(authInfo, request, log, callback) {
     const metadataValParams = {
         authInfo,
         bucketName,
-        requestType: 'bucketGetPolicy',
+        requestType: request.apiMethods || 'bucketGetPolicy',
         request,
     };
-    return metadataValidateBucket(metadataValParams, log, (err, bucket) => {
+    return metadataValidateBucket(metadataValParams, request.iamAuthzResults, log, (err, bucket) => {
         const corsHeaders = collectCorsHeaders(headers.origin, method, bucket);
         if (err) {
             log.debug('error processing request', {

View File

@@ -21,10 +21,10 @@ function bucketGetReplication(authInfo, request, log, callback) {
     const metadataValParams = {
         authInfo,
         bucketName,
-        requestType: 'bucketGetReplication',
+        requestType: request.apiMethods || 'bucketGetReplication',
         request,
     };
-    return metadataValidateBucket(metadataValParams, log, (err, bucket) => {
+    return metadataValidateBucket(metadataValParams, request.iamAuthzResults, log, (err, bucket) => {
         const corsHeaders = collectCorsHeaders(headers.origin, method, bucket);
         if (err) {
             log.debug('error processing request', {

View File

@@ -70,7 +70,7 @@ async function bucketGetTagging(authInfo, request, log, callback) {
     const metadataValParams = {
         authInfo,
         bucketName,
-        requestType: 'bucketGetTagging',
+        requestType: request.apiMethods || 'bucketGetTagging',
         request,
     };
@@ -78,7 +78,7 @@ async function bucketGetTagging(authInfo, request, log, callback) {
     let xml = null;
     try {
-        bucket = await metadataValidateBucketPromise(metadataValParams, log);
+        bucket = await metadataValidateBucketPromise(metadataValParams, request.iamAuthzResults, log);
         // eslint-disable-next-line no-unused-expressions
         await checkExpectedBucketOwnerPromise(headers, bucket, log);
         const tags = bucket.getTags();

View File

@@ -54,11 +54,11 @@ function bucketGetVersioning(authInfo, request, log, callback) {
     const metadataValParams = {
         authInfo,
         bucketName,
-        requestType: 'bucketGetVersioning',
+        requestType: request.apiMethods || 'bucketGetVersioning',
         request,
     };
-    metadataValidateBucket(metadataValParams, log, (err, bucket) => {
+    metadataValidateBucket(metadataValParams, request.iamAuthzResults, log, (err, bucket) => {
         const corsHeaders = collectCorsHeaders(request.headers.origin,
             request.method, bucket);
         if (err) {

View File

@@ -39,7 +39,8 @@ function bucketGetWebsite(authInfo, request, log, callback) {
         const corsHeaders = collectCorsHeaders(request.headers.origin,
             request.method, bucket);
-        if (!isBucketAuthorized(bucket, requestType, canonicalID, authInfo, log, request)) {
+        if (!isBucketAuthorized(bucket, request.apiMethods || requestType, canonicalID, authInfo,
+            request.iamAuthzResults, log, request)) {
             log.debug('access denied for user on bucket', {
                 requestType,
                 method: 'bucketGetWebsite',

View File

@@ -44,7 +44,7 @@ const monitoring = require('../utilities/metrics');
 function bucketPutACL(authInfo, request, log, callback) {
     log.debug('processing request', { method: 'bucketPutACL' });
-    const bucketName = request.bucketName;
+    const { bucketName } = request;
     const canonicalID = authInfo.getCanonicalID();
     const newCannedACL = request.headers['x-amz-acl'];
     const possibleCannedACL = [
@@ -54,19 +54,6 @@ function bucketPutACL(authInfo, request, log, callback) {
         'authenticated-read',
         'log-delivery-write',
     ];
-    if (newCannedACL && possibleCannedACL.indexOf(newCannedACL) === -1) {
-        log.trace('invalid canned acl argument', {
-            acl: newCannedACL,
-            method: 'bucketPutACL',
-        });
-        monitoring.promMetrics('PUT', bucketName, 400, 'bucketPutACL');
-        return callback(errors.InvalidArgument);
-    }
-    if (!aclUtils.checkGrantHeaderValidity(request.headers)) {
-        log.trace('invalid acl header');
-        monitoring.promMetrics('PUT', bucketName, 400, 'bucketPutACL');
-        return callback(errors.InvalidArgument);
-    }
     const possibleGroups = [constants.allAuthedUsersId,
         constants.publicId,
         constants.logId,
@@ -74,7 +61,7 @@ function bucketPutACL(authInfo, request, log, callback) {
     const metadataValParams = {
         authInfo,
         bucketName,
-        requestType: 'bucketPutACL',
+        requestType: request.apiMethods || 'bucketPutACL',
         request,
     };
     const possibleGrants = ['FULL_CONTROL', 'WRITE',
@@ -88,34 +75,43 @@ function bucketPutACL(authInfo, request, log, callback) {
         READ_ACP: [],
     };
-    const grantReadHeader =
-        aclUtils.parseGrant(request.headers[
-            'x-amz-grant-read'], 'READ');
-    const grantWriteHeader =
-        aclUtils.parseGrant(request.headers['x-amz-grant-write'], 'WRITE');
-    const grantReadACPHeader =
-        aclUtils.parseGrant(request.headers['x-amz-grant-read-acp'],
-            'READ_ACP');
-    const grantWriteACPHeader =
-        aclUtils.parseGrant(request.headers['x-amz-grant-write-acp'],
-            'WRITE_ACP');
-    const grantFullControlHeader =
-        aclUtils.parseGrant(request.headers['x-amz-grant-full-control'],
-            'FULL_CONTROL');
+    const grantReadHeader = aclUtils.parseGrant(request.headers[
+        'x-amz-grant-read'], 'READ');
+    const grantWriteHeader = aclUtils.parseGrant(request.headers['x-amz-grant-write'], 'WRITE');
+    const grantReadACPHeader = aclUtils.parseGrant(request.headers['x-amz-grant-read-acp'],
+        'READ_ACP');
+    const grantWriteACPHeader = aclUtils.parseGrant(request.headers['x-amz-grant-write-acp'],
+        'WRITE_ACP');
+    const grantFullControlHeader = aclUtils.parseGrant(request.headers['x-amz-grant-full-control'],
+        'FULL_CONTROL');
     return async.waterfall([
         function waterfall1(next) {
-            metadataValidateBucket(metadataValParams, log,
+            metadataValidateBucket(metadataValParams, request.iamAuthzResults, log,
                 (err, bucket) => {
                     if (err) {
                         log.trace('request authorization failed', {
                             error: err,
                             method: 'metadataValidateBucket',
                         });
                         return next(err, bucket);
                     }
-                    return next(null, bucket);
-                });
+                    // if the API call is allowed, ensure that the parameters are valid
+                    if (newCannedACL && possibleCannedACL.indexOf(newCannedACL) === -1) {
+                        log.trace('invalid canned acl argument', {
+                            acl: newCannedACL,
+                            method: 'bucketPutACL',
+                        });
+                        monitoring.promMetrics('PUT', bucketName, 400, 'bucketPutACL');
+                        return next(errors.InvalidArgument);
+                    }
+                    if (!aclUtils.checkGrantHeaderValidity(request.headers)) {
+                        log.trace('invalid acl header');
+                        monitoring.promMetrics('PUT', bucketName, 400, 'bucketPutACL');
+                        return next(errors.InvalidArgument);
+                    }
+                    return next(null, bucket);
+                });
         },
         function waterfall2(bucket, next) {
             // If not setting acl through headers, parse body
@@ -182,7 +178,7 @@ function bucketPutACL(authInfo, request, log, callback) {
                     if (!skip && granteeType === 'Group') {
                         if (possibleGroups.indexOf(grantee.URI[0]) < 0) {
                             log.trace('invalid user group',
                                 { userGroup: grantee.URI[0] });
                             return next(errors.InvalidArgument, bucket);
                         }
                         return usersIdentifiedByGroup.push({
@@ -196,22 +192,23 @@ function bucketPutACL(authInfo, request, log, callback) {
             } else {
                 // If no canned ACL and no parsed xml, loop
                 // through the access headers
-                const allGrantHeaders =
-                    [].concat(grantReadHeader, grantWriteHeader,
+                const allGrantHeaders = [].concat(grantReadHeader, grantWriteHeader,
                     grantReadACPHeader, grantWriteACPHeader,
                     grantFullControlHeader);
-                usersIdentifiedByEmail = allGrantHeaders.filter(item =>
-                    item && item.userIDType.toLowerCase() === 'emailaddress');
+                usersIdentifiedByEmail = allGrantHeaders.filter(item => item
+                    && item.userIDType.toLowerCase() === 'emailaddress');
                 usersIdentifiedByGroup = allGrantHeaders
                     .filter(itm => itm && itm.userIDType
                         .toLowerCase() === 'uri');
                 for (let i = 0; i < usersIdentifiedByGroup.length; i++) {
                     const userGroup = usersIdentifiedByGroup[i].identifier;
                     if (possibleGroups.indexOf(userGroup) < 0) {
-                        log.trace('invalid user group', { userGroup,
-                            method: 'bucketPutACL' });
+                        log.trace('invalid user group', {
+                            userGroup,
+                            method: 'bucketPutACL',
+                        });
                         return next(errors.InvalidArgument, bucket);
                     }
                 }
@@ -246,8 +243,8 @@ function bucketPutACL(authInfo, request, log, callback) {
                 return vault.getCanonicalIds(justEmails, log,
                     (err, results) => {
                         if (err) {
-                            log.trace('error looking up canonical ids', {
-                                error: err, method: 'vault.getCanonicalIDs' });
+                            log.trace('error looking up canonical ids',
+                                { error: err, method: 'vault.getCanonicalIDs' });
                             return next(err, bucket);
                         }
                         const reconstructedUsersIdentifiedByEmail = aclUtils
@@ -256,7 +253,8 @@ function bucketPutACL(authInfo, request, log, callback) {
                         const allUsers = [].concat(
                             reconstructedUsersIdentifiedByEmail,
                             usersIdentifiedByID,
-                            usersIdentifiedByGroup);
+                            usersIdentifiedByGroup,
+                        );
                         const revisedAddACLParams = aclUtils
                             .sortHeaderGrants(allUsers, addACLParams);
                         return next(null, bucket, revisedAddACLParams);
@@ -264,9 +262,9 @@ function bucketPutACL(authInfo, request, log, callback) {
             }
             const allUsers = [].concat(
                 usersIdentifiedByID,
-                usersIdentifiedByGroup);
-            const revisedAddACLParams =
-                aclUtils.sortHeaderGrants(allUsers, addACLParams);
+                usersIdentifiedByGroup,
+            );
+            const revisedAddACLParams = aclUtils.sortHeaderGrants(allUsers, addACLParams);
             return next(null, bucket, revisedAddACLParams);
         },
         function waterfall4(bucket, addACLParams, next) {
@@ -277,19 +275,19 @@ function bucketPutACL(authInfo, request, log, callback) {
             if (bucket.hasTransientFlag() || bucket.hasDeletedFlag()) {
                 log.trace('transient or deleted flag so cleaning up bucket');
                 bucket.setFullAcl(addACLParams);
-                return cleanUpBucket(bucket, canonicalID, log, err =>
-                    next(err, bucket));
+                return cleanUpBucket(bucket, canonicalID, log, err => next(err, bucket));
             }
             // If no bucket flags, just add acl's to bucket metadata
-            return acl.addACL(bucket, addACLParams, log, err =>
-                next(err, bucket));
+            return acl.addACL(bucket, addACLParams, log, err => next(err, bucket));
         },
     ], (err, bucket) => {
         const corsHeaders = collectCorsHeaders(request.headers.origin,
             request.method, bucket);
         if (err) {
-            log.trace('error processing request', { error: err,
-                method: 'bucketPutACL' });
+            log.trace('error processing request', {
+                error: err,
+                method: 'bucketPutACL',
+            });
             monitoring.promMetrics('PUT', bucketName, err.code, 'bucketPutACL');
         } else {
             pushMetric('putBucketAcl', log, {
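Beyond the new authorization arguments, bucketPutACL also moves the canned-ACL and grant-header validation from the top of the handler into waterfall1, so the caller is authorized before its arguments are inspected. A reduced sketch of the resulting order of checks (error values follow the diff):

    metadataValidateBucket(metadataValParams, request.iamAuthzResults, log, (err, bucket) => {
        if (err) {
            return next(err, bucket); // authorization failure wins
        }
        // only an authorized caller learns that its arguments are invalid
        if (newCannedACL && possibleCannedACL.indexOf(newCannedACL) === -1) {
            return next(errors.InvalidArgument);
        }
        if (!aclUtils.checkGrantHeaderValidity(request.headers)) {
            return next(errors.InvalidArgument);
        }
        return next(null, bucket);
    });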

View File

@@ -4,8 +4,7 @@ const { errors } = require('arsenal');
 const bucketShield = require('./apiUtils/bucket/bucketShield');
 const collectCorsHeaders = require('../utilities/collectCorsHeaders');
-const { isBucketAuthorized } =
-    require('./apiUtils/authorization/permissionChecks');
+const { isBucketAuthorized } = require('./apiUtils/authorization/permissionChecks');
 const metadata = require('../metadata/wrapper');
 const { parseCorsXml } = require('./apiUtils/bucket/bucketCors');
 const { pushMetric } = require('../utapi/utilities');
@@ -23,12 +22,12 @@ const requestType = 'bucketPutCors';
  */
 function bucketPutCors(authInfo, request, log, callback) {
     log.debug('processing request', { method: 'bucketPutCors' });
-    const bucketName = request.bucketName;
+    const { bucketName } = request;
     const canonicalID = authInfo.getCanonicalID();
     if (!request.post) {
         log.debug('CORS xml body is missing',
             { error: errors.MissingRequestBodyError });
         monitoring.promMetrics('PUT', bucketName, 400, 'putBucketCors');
         return callback(errors.MissingRequestBodyError);
     }
@@ -70,7 +69,8 @@ function bucketPutCors(authInfo, request, log, callback) {
             });
         },
         function validateBucketAuthorization(bucket, rules, corsHeaders, next) {
-            if (!isBucketAuthorized(bucket, requestType, canonicalID, authInfo, log, request)) {
+            if (!isBucketAuthorized(bucket, request.apiMethods || requestType, canonicalID, authInfo,
+                request.iamAuthzResults, log, request)) {
                 log.debug('access denied for account on bucket', {
                     requestType,
                 });
@@ -81,13 +81,14 @@ function bucketPutCors(authInfo, request, log, callback) {
         function updateBucketMetadata(bucket, rules, corsHeaders, next) {
             log.trace('updating bucket cors rules in metadata');
             bucket.setCors(rules);
-            metadata.updateBucket(bucketName, bucket, log, err =>
-                next(err, corsHeaders));
+            metadata.updateBucket(bucketName, bucket, log, err => next(err, corsHeaders));
         },
     ], (err, corsHeaders) => {
         if (err) {
-            log.trace('error processing request', { error: err,
-                method: 'bucketPutCors' });
+            log.trace('error processing request', {
+                error: err,
+                method: 'bucketPutCors',
+            });
             monitoring.promMetrics('PUT', bucketName, err.code,
                 'putBucketCors');
         }

View File

@@ -18,17 +18,17 @@ const collectCorsHeaders = require('../utilities/collectCorsHeaders');
  */
 function bucketPutEncryption(authInfo, request, log, callback) {
-    const bucketName = request.bucketName;
+    const { bucketName } = request;
     const metadataValParams = {
         authInfo,
         bucketName,
-        requestType: 'bucketPutEncryption',
+        requestType: request.apiMethods || 'bucketPutEncryption',
         request,
     };
     return async.waterfall([
-        next => metadataValidateBucket(metadataValParams, log, next),
+        next => metadataValidateBucket(metadataValParams, request.iamAuthzResults, log, next),
         (bucket, next) => checkExpectedBucketOwner(request.headers, bucket, log, err => next(err, bucket)),
         (bucket, next) => {
             log.trace('parsing encryption config', { method: 'bucketPutEncryption' });

View File

@@ -1,7 +1,6 @@
 const { waterfall } = require('async');
 const uuid = require('uuid/v4');
-const LifecycleConfiguration =
-    require('arsenal').models.LifecycleConfiguration;
+const { LifecycleConfiguration } = require('arsenal').models;
 const parseXML = require('../utilities/parseXML');
 const collectCorsHeaders = require('../utilities/collectCorsHeaders');
@@ -22,11 +21,11 @@ const monitoring = require('../utilities/metrics');
 function bucketPutLifecycle(authInfo, request, log, callback) {
     log.debug('processing request', { method: 'bucketPutLifecycle' });
-    const bucketName = request.bucketName;
+    const { bucketName } = request;
     const metadataValParams = {
         authInfo,
         bucketName,
-        requestType: 'bucketPutLifecycle',
+        requestType: request.apiMethods || 'bucketPutLifecycle',
         request,
     };
     return waterfall([
@@ -43,7 +42,7 @@ function bucketPutLifecycle(authInfo, request, log, callback) {
                 return next(null, configObj);
             });
         },
-        (lcConfig, next) => metadataValidateBucket(metadataValParams, log,
+        (lcConfig, next) => metadataValidateBucket(metadataValParams, request.iamAuthzResults, log,
             (err, bucket) => {
                 if (err) {
                     return next(err, bucket);
@@ -55,17 +54,19 @@ function bucketPutLifecycle(authInfo, request, log, callback) {
                 bucket.setUid(uuid());
             }
             bucket.setLifecycleConfiguration(lcConfig);
-            metadata.updateBucket(bucket.getName(), bucket, log, err =>
-                next(err, bucket));
+            metadata.updateBucket(bucket.getName(), bucket, log, err => next(err, bucket));
         },
     ], (err, bucket) => {
         const corsHeaders = collectCorsHeaders(request.headers.origin,
             request.method, bucket);
         if (err) {
-            log.trace('error processing request', { error: err,
-                method: 'bucketPutLifecycle' });
+            log.trace('error processing request', {
+                error: err,
+                method: 'bucketPutLifecycle',
+            });
             monitoring.promMetrics(
-                'PUT', bucketName, err.code, 'putBucketLifecycle');
+                'PUT', bucketName, err.code, 'putBucketLifecycle',
+            );
             return callback(err, corsHeaders);
         }
         pushMetric('putBucketLifecycle', log, {

View File

@@ -19,11 +19,11 @@ const { pushMetric } = require('../utapi/utilities');
 function bucketPutNotification(authInfo, request, log, callback) {
     log.debug('processing request', { method: 'bucketPutNotification' });
-    const bucketName = request.bucketName;
+    const { bucketName } = request;
     const metadataValParams = {
         authInfo,
         bucketName,
-        requestType: 'bucketPutNotification',
+        requestType: request.apiMethods || 'bucketPutNotification',
         request,
     };
@@ -34,7 +34,7 @@ function bucketPutNotification(authInfo, request, log, callback) {
             const notifConfig = notificationConfig.error ? undefined : notificationConfig;
             process.nextTick(() => next(notificationConfig.error, notifConfig));
         },
-        (notifConfig, next) => metadataValidateBucket(metadataValParams, log,
+        (notifConfig, next) => metadataValidateBucket(metadataValParams, request.iamAuthzResults, log,
             (err, bucket) => next(err, bucket, notifConfig)),
         (bucket, notifConfig, next) => {
            bucket.setNotificationConfiguration(notifConfig);
@@ -45,8 +45,10 @@ function bucketPutNotification(authInfo, request, log, callback) {
         const corsHeaders = collectCorsHeaders(request.headers.origin,
             request.method, bucket);
         if (err) {
-            log.trace('error processing request', { error: err,
-                method: 'bucketPutNotification' });
+            log.trace('error processing request', {
+                error: err,
+                method: 'bucketPutNotification',
+            });
             return callback(err, corsHeaders);
         }
         pushMetric('putBucketNotification', log, {

View File

@@ -1,8 +1,8 @@
 const { waterfall } = require('async');
 const arsenal = require('arsenal');
-const errors = arsenal.errors;
-const ObjectLockConfiguration = arsenal.models.ObjectLockConfiguration;
+const { errors } = arsenal;
+const { ObjectLockConfiguration } = arsenal.models;
 const parseXML = require('../utilities/parseXML');
 const collectCorsHeaders = require('../utilities/collectCorsHeaders');
@@ -22,11 +22,11 @@ const { pushMetric } = require('../utapi/utilities');
 function bucketPutObjectLock(authInfo, request, log, callback) {
     log.debug('processing request', { method: 'bucketPutObjectLock' });
-    const bucketName = request.bucketName;
+    const { bucketName } = request;
     const metadataValParams = {
         authInfo,
         bucketName,
-        requestType: 'bucketPutObjectLock',
+        requestType: request.apiMethods || 'bucketPutObjectLock',
         request,
     };
     return waterfall([
@@ -36,12 +36,12 @@ function bucketPutObjectLock(authInfo, request, log, callback) {
             // if there was an error getting object lock configuration,
             // returned configObj will contain 'error' key
             process.nextTick(() => {
-                const configObj = lockConfigClass.
-                    getValidatedObjectLockConfiguration();
+                const configObj = lockConfigClass
+                    .getValidatedObjectLockConfiguration();
                 return next(configObj.error || null, configObj);
             });
         },
-        (objectLockConfig, next) => metadataValidateBucket(metadataValParams,
+        (objectLockConfig, next) => metadataValidateBucket(metadataValParams, request.iamAuthzResults,
             log, (err, bucket) => {
                 if (err) {
                     return next(err, bucket);
@@ -53,23 +53,25 @@ function bucketPutObjectLock(authInfo, request, log, callback) {
             process.nextTick(() => {
                 if (!isObjectLockEnabled) {
                     return next(errors.InvalidBucketState.customizeDescription(
-                        'Object Lock configuration cannot be enabled on ' +
-                        'existing buckets'), bucket);
+                        'Object Lock configuration cannot be enabled on '
+                        + 'existing buckets',
+                    ), bucket);
                 }
                 return next(null, bucket, objectLockConfig);
             });
         },
         (bucket, objectLockConfig, next) => {
             bucket.setObjectLockConfiguration(objectLockConfig);
-            metadata.updateBucket(bucket.getName(), bucket, log, err =>
-                next(err, bucket));
+            metadata.updateBucket(bucket.getName(), bucket, log, err => next(err, bucket));
         },
     ], (err, bucket) => {
         const corsHeaders = collectCorsHeaders(request.headers.origin,
             request.method, bucket);
         if (err) {
-            log.trace('error processing request', { error: err,
-                method: 'bucketPutObjectLock' });
+            log.trace('error processing request', {
+                error: err,
+                method: 'bucketPutObjectLock',
+            });
             return callback(err, corsHeaders);
         }
         pushMetric('putBucketObjectLock', log, {

View File

@@ -17,8 +17,7 @@ const { BucketPolicy } = models;
 function _checkNotImplementedPolicy(policyString) {
     // bucket names and key names cannot include "", so including those
     // isolates not implemented keys
-    return policyString.includes('"Condition"')
-        || policyString.includes('"Service"')
+    return policyString.includes('"Service"')
         || policyString.includes('"Federated"');
 }
@@ -37,7 +36,7 @@ function bucketPutPolicy(authInfo, request, log, callback) {
     const metadataValParams = {
         authInfo,
         bucketName,
-        requestType: 'bucketPutPolicy',
+        requestType: request.apiMethods || 'bucketPutPolicy',
         request,
     };
@@ -70,7 +69,7 @@ function bucketPutPolicy(authInfo, request, log, callback) {
                 return next(null, bucketPolicy);
             });
         },
-        (bucketPolicy, next) => metadataValidateBucket(metadataValParams, log,
+        (bucketPolicy, next) => metadataValidateBucket(metadataValParams, request.iamAuthzResults, log,
             (err, bucket) => {
                 if (err) {
                     return next(err, bucket);
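Dropping '"Condition"' from _checkNotImplementedPolicy means a bucket policy that contains a Condition block is no longer rejected up front as not implemented and instead reaches normal evaluation. A hedged example of a policy that this change now lets through (bucket name and IP range are placeholders):

    const examplePolicy = {
        Version: '2012-10-17',
        Statement: [{
            Effect: 'Deny',
            Principal: '*',
            Action: 's3:PutObject',
            Resource: 'arn:aws:s3:::example-bucket/*',
            // the mere presence of "Condition" used to trigger the
            // not-implemented check above
            Condition: { NotIpAddress: { 'aws:SourceIp': '192.0.2.0/24' } },
        }],
    };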

View File

@@ -28,7 +28,7 @@ function bucketPutReplication(authInfo, request, log, callback) {
     const metadataValParams = {
         authInfo,
         bucketName,
-        requestType: 'bucketPutReplication',
+        requestType: request.apiMethods || 'bucketPutReplication',
         request,
     };
     return waterfall([
@@ -37,7 +37,7 @@ function bucketPutReplication(authInfo, request, log, callback) {
         // Check bucket user privileges and ensure versioning is 'Enabled'.
         (config, next) =>
             // TODO: Validate that destination bucket exists and has versioning.
-            metadataValidateBucket(metadataValParams, log, (err, bucket) => {
+            metadataValidateBucket(metadataValParams, request.iamAuthzResults, log, (err, bucket) => {
                 if (err) {
                     return next(err);
                 }

View File

@@ -38,11 +38,11 @@ function bucketPutTagging(authInfo, request, log, callback) {
     const metadataValParams = {
         authInfo,
         bucketName,
-        requestType: 'bucketPutTagging',
+        requestType: request.apiMethods || 'bucketPutTagging',
     };
     let bucket = null;
     return waterfall([
-        next => metadataValidateBucket(metadataValParams, log,
+        next => metadataValidateBucket(metadataValParams, request.iamAuthzResults, log,
             (err, b) => {
                 bucket = b;
                 return next(err);

View File

@@ -88,13 +88,13 @@ function bucketPutVersioning(authInfo, request, log, callback) {
     const metadataValParams = {
         authInfo,
         bucketName,
-        requestType: 'bucketPutVersioning',
+        requestType: request.apiMethods || 'bucketPutVersioning',
         request,
     };
     return waterfall([
         next => _parseXML(request, log, next),
-        next => metadataValidateBucket(metadataValParams, log,
+        next => metadataValidateBucket(metadataValParams, request.iamAuthzResults, log,
             (err, bucket) => next(err, bucket)), // ignore extra null object,
         (bucket, next) => parseString(request.post, (err, result) => {
             // just for linting; there should not be any parsing error here

View File

@@ -49,7 +49,8 @@ function bucketPutWebsite(authInfo, request, log, callback) {
             });
         },
         function validateBucketAuthorization(bucket, config, next) {
-            if (!isBucketAuthorized(bucket, requestType, canonicalID, authInfo, log, request)) {
+            if (!isBucketAuthorized(bucket, request.apiMethods || requestType, canonicalID, authInfo,
+                request.iamAuthzResults, log, request)) {
                 log.debug('access denied for user on bucket', {
                     requestType,
                     method: 'bucketPutWebsite',

View File

@@ -46,11 +46,11 @@ function objectGet(authInfo, request, returnTagCount, log, callback) {
         objectKey,
         versionId,
         getDeleteMarker: true,
-        requestType: 'objectGet',
+        requestType: request.apiMethods || 'objectGet',
         request,
     };
-    return metadataValidateBucketAndObj(mdValParams, log,
+    return metadataValidateBucketAndObj(mdValParams, request.iamAuthzResults, log,
         (err, bucket, objMD) => {
             const corsHeaders = collectCorsHeaders(request.headers.origin,
                 request.method, bucket);

View File

@@ -61,7 +61,7 @@ function objectGetACL(authInfo, request, log, callback) {
         bucketName,
         objectKey,
         versionId,
-        requestType: 'objectGetACL',
+        requestType: request.apiMethods || 'objectGetACL',
         request,
     };
     const grantInfo = {
@@ -74,7 +74,7 @@ function objectGetACL(authInfo, request, log, callback) {
     return async.waterfall([
         function validateBucketAndObj(next) {
-            return metadataValidateBucketAndObj(metadataValParams, log,
+            return metadataValidateBucketAndObj(metadataValParams, request.iamAuthzResults, log,
                 (err, bucket, objectMD) => {
                     if (err) {
                         log.trace('request authorization failed',

View File

@ -40,12 +40,12 @@ function objectGetLegalHold(authInfo, request, log, callback) {
bucketName, bucketName,
objectKey, objectKey,
versionId, versionId,
requestType: 'objectGetLegalHold', requestType: request.apiMethods || 'objectGetLegalHold',
request, request,
}; };
return async.waterfall([ return async.waterfall([
next => metadataValidateBucketAndObj(metadataValParams, log, next => metadataValidateBucketAndObj(metadataValParams, request.iamAuthzResults, log,
(err, bucket, objectMD) => { (err, bucket, objectMD) => {
if (err) { if (err) {
log.trace('request authorization failed', log.trace('request authorization failed',

View File

@ -40,12 +40,12 @@ function objectGetRetention(authInfo, request, log, callback) {
bucketName, bucketName,
objectKey, objectKey,
versionId: reqVersionId, versionId: reqVersionId,
requestType: 'objectGetRetention', requestType: request.apiMethods || 'objectGetRetention',
request, request,
}; };
return async.waterfall([ return async.waterfall([
next => metadataValidateBucketAndObj(metadataValParams, log, next => metadataValidateBucketAndObj(metadataValParams, request.iamAuthzResults, log,
(err, bucket, objectMD) => { (err, bucket, objectMD) => {
if (err) { if (err) {
log.trace('request authorization failed', log.trace('request authorization failed',

View File

@ -41,12 +41,11 @@ function objectGetTagging(authInfo, request, log, callback) {
bucketName, bucketName,
objectKey, objectKey,
versionId: reqVersionId, versionId: reqVersionId,
requestType: 'objectGetTagging', requestType: request.apiMethods || 'objectGetTagging',
request, request,
}; };
return async.waterfall([ return async.waterfall([
next => metadataValidateBucketAndObj(metadataValParams, log, next => metadataValidateBucketAndObj(metadataValParams, request.iamAuthzResults, log,
(err, bucket, objectMD) => { (err, bucket, objectMD) => {
if (err) { if (err) {
log.trace('request authorization failed', log.trace('request authorization failed',
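
All of the object-level GET routes above share the same validation callback shape: `metadataValidateBucketAndObj` now receives `request.iamAuthzResults`, and a missing `objMD` maps to NoSuchVersion when a specific version was requested and to NoSuchKey otherwise. A minimal sketch of that shared callback, under the same assumptions as the diff:

// Hedged sketch of the shared validation callback; the helper path and
// signature are the ones used throughout this diff.
const { errors } = require('arsenal');
const { metadataValidateBucketAndObj } = require('../metadata/metadataUtils');

function validateObjectRoute(metadataValParams, request, reqVersionId, log, next) {
    return metadataValidateBucketAndObj(metadataValParams, request.iamAuthzResults, log,
        (err, bucket, objectMD) => {
            if (err) {
                log.trace('request authorization failed', { error: err });
                return next(err);
            }
            if (!objectMD) {
                // Version explicitly requested but absent -> NoSuchVersion;
                // otherwise the key itself is missing -> NoSuchKey.
                return next(reqVersionId ? errors.NoSuchVersion : errors.NoSuchKey, bucket);
            }
            return next(null, bucket, objectMD);
        });
}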

View File

@ -16,6 +16,7 @@ const { config } = require('../Config');
const { setExpirationHeaders } = require('./apiUtils/object/expirationHeaders'); const { setExpirationHeaders } = require('./apiUtils/object/expirationHeaders');
const monitoring = require('../utilities/metrics'); const monitoring = require('../utilities/metrics');
const writeContinue = require('../utilities/writeContinue'); const writeContinue = require('../utilities/writeContinue');
const versionIdUtils = versioning.VersionID; const versionIdUtils = versioning.VersionID;
/** /**
@ -59,7 +60,7 @@ function objectPut(authInfo, request, streamingV4Params, log, callback) {
} }
const invalidSSEError = errors.InvalidArgument.customizeDescription( const invalidSSEError = errors.InvalidArgument.customizeDescription(
'The encryption method specified is not supported'); 'The encryption method specified is not supported');
const requestType = 'objectPut'; const requestType = request.apiMethods || 'objectPut';
const valParams = { authInfo, bucketName, objectKey, requestType, request }; const valParams = { authInfo, bucketName, objectKey, requestType, request };
const canonicalID = authInfo.getCanonicalID(); const canonicalID = authInfo.getCanonicalID();
@ -70,132 +71,130 @@ function objectPut(authInfo, request, streamingV4Params, log, callback) {
} }
log.trace('owner canonicalID to send to data', { canonicalID }); log.trace('owner canonicalID to send to data', { canonicalID });
return metadataValidateBucketAndObj(valParams, request.iamAuthzResults, log,
return metadataValidateBucketAndObj(valParams, log, (err, bucket, objMD) => {
(err, bucket, objMD) => { const responseHeaders = collectCorsHeaders(headers.origin,
const responseHeaders = collectCorsHeaders(headers.origin, method, bucket);
method, bucket);
if (err) {
log.trace('error processing request', {
error: err,
method: 'metadataValidateBucketAndObj',
});
monitoring.promMetrics('PUT', bucketName, err.code, 'putObject');
return callback(err, responseHeaders);
}
if (bucket.hasDeletedFlag() && canonicalID !== bucket.getOwner()) {
log.trace('deleted flag on bucket and request ' +
'from non-owner account');
monitoring.promMetrics('PUT', bucketName, 404, 'putObject');
return callback(errors.NoSuchBucket);
}
return async.waterfall([
function handleTransientOrDeleteBuckets(next) {
if (bucket.hasTransientFlag() || bucket.hasDeletedFlag()) {
return cleanUpBucket(bucket, canonicalID, log, next);
}
return next();
},
function getSSEConfig(next) {
return getObjectSSEConfiguration(headers, bucket, log,
(err, sseConfig) => {
if (err) {
log.error('error getting server side encryption config', { err });
return next(invalidSSEError);
}
return next(null, sseConfig);
}
);
},
function createCipherBundle(serverSideEncryptionConfig, next) {
if (serverSideEncryptionConfig) {
return kms.createCipherBundle(
serverSideEncryptionConfig, log, next);
}
return next(null, null);
},
function objectCreateAndStore(cipherBundle, next) {
const objectLockValidationError
= validateHeaders(bucket, headers, log);
if (objectLockValidationError) {
return next(objectLockValidationError);
}
writeContinue(request, request._response);
return createAndStoreObject(bucketName,
bucket, objectKey, objMD, authInfo, canonicalID, cipherBundle,
request, false, streamingV4Params, log, next);
},
], (err, storingResult) => {
if (err) { if (err) {
monitoring.promMetrics('PUT', bucketName, err.code, log.trace('error processing request', {
'putObject'); error: err,
method: 'metadataValidateBucketAndObj',
});
monitoring.promMetrics('PUT', bucketName, err.code, 'putObject');
return callback(err, responseHeaders); return callback(err, responseHeaders);
} }
// ingestSize assumes that these custom headers indicate if (bucket.hasDeletedFlag() && canonicalID !== bucket.getOwner()) {
// an ingestion PUT which is a metadata only operation. log.trace('deleted flag on bucket and request ' +
// Since these headers can be modified client side, they 'from non-owner account');
// should be used with caution if needed for precise monitoring.promMetrics('PUT', bucketName, 404, 'putObject');
// metrics. return callback(errors.NoSuchBucket);
const ingestSize = (request.headers['x-amz-meta-mdonly'] }
&& !Number.isNaN(request.headers['x-amz-meta-size']))
? Number.parseInt(request.headers['x-amz-meta-size'], 10) : null;
const newByteLength = parsedContentLength;
setExpirationHeaders(responseHeaders, { return async.waterfall([
lifecycleConfig: bucket.getLifecycleConfiguration(), function handleTransientOrDeleteBuckets(next) {
objectParams: { if (bucket.hasTransientFlag() || bucket.hasDeletedFlag()) {
key: objectKey, return cleanUpBucket(bucket, canonicalID, log, next);
date: storingResult.lastModified, }
tags: storingResult.tags, return next();
}, },
}); function getSSEConfig(next) {
return getObjectSSEConfiguration(headers, bucket, log,
// Utapi expects null or a number for oldByteLength: (err, sseConfig) => {
// * null - new object if (err) {
// * 0 or > 0 - existing object with content-length 0 or > 0 log.error('error getting server side encryption config', { err });
// objMD here is the master version that we would return next(invalidSSEError);
// have overwritten if there was an existing version or object }
// return next(null, sseConfig);
// TODO: Handle utapi metrics for null version overwrites. }
const oldByteLength = objMD && objMD['content-length'] );
!== undefined ? objMD['content-length'] : null; },
if (storingResult) { function createCipherBundle(serverSideEncryptionConfig, next) {
// ETag's hex should always be enclosed in quotes if (serverSideEncryptionConfig) {
responseHeaders.ETag = `"${storingResult.contentMD5}"`; return kms.createCipherBundle(
} serverSideEncryptionConfig, log, next);
const vcfg = bucket.getVersioningConfiguration(); }
const isVersionedObj = vcfg && vcfg.Status === 'Enabled'; return next(null, null);
if (isVersionedObj) { },
if (storingResult && storingResult.versionId) { function objectCreateAndStore(cipherBundle, next) {
responseHeaders['x-amz-version-id'] = const objectLockValidationError
versionIdUtils.encode(storingResult.versionId, = validateHeaders(bucket, headers, log);
config.versionIdEncodingType); if (objectLockValidationError) {
return next(objectLockValidationError);
}
writeContinue(request, request._response);
return createAndStoreObject(bucketName,
bucket, objectKey, objMD, authInfo, canonicalID, cipherBundle,
request, false, streamingV4Params, log, next);
},
], (err, storingResult) => {
if (err) {
monitoring.promMetrics('PUT', bucketName, err.code,
'putObject');
return callback(err, responseHeaders);
} }
} // ingestSize assumes that these custom headers indicate
// an ingestion PUT which is a metadata only operation.
// Since these headers can be modified client side, they
// should be used with caution if needed for precise
// metrics.
const ingestSize = (request.headers['x-amz-meta-mdonly']
&& !Number.isNaN(request.headers['x-amz-meta-size']))
? Number.parseInt(request.headers['x-amz-meta-size'], 10) : null;
const newByteLength = parsedContentLength;
// Only pre-existing non-versioned objects get 0 all others use 1 setExpirationHeaders(responseHeaders, {
const numberOfObjects = !isVersionedObj && oldByteLength !== null ? 0 : 1; lifecycleConfig: bucket.getLifecycleConfiguration(),
objectParams: {
key: objectKey,
date: storingResult.lastModified,
tags: storingResult.tags,
},
});
// only the bucket owner's metrics should be updated, regardless of // Utapi expects null or a number for oldByteLength:
// who the requester is // * null - new object
pushMetric('putObject', log, { // * 0 or > 0 - existing object with content-length 0 or > 0
authInfo, // objMD here is the master version that we would
canonicalID: bucket.getOwner(), // have overwritten if there was an existing version or object
bucket: bucketName, //
keys: [objectKey], // TODO: Handle utapi metrics for null version overwrites.
newByteLength, const oldByteLength = objMD && objMD['content-length']
oldByteLength: isVersionedObj ? null : oldByteLength, !== undefined ? objMD['content-length'] : null;
versionId: isVersionedObj && storingResult ? storingResult.versionId : undefined, if (storingResult) {
location: bucket.getLocationConstraint(), // ETag's hex should always be enclosed in quotes
numberOfObjects, responseHeaders.ETag = `"${storingResult.contentMD5}"`;
}
const vcfg = bucket.getVersioningConfiguration();
const isVersionedObj = vcfg && vcfg.Status === 'Enabled';
if (isVersionedObj) {
if (storingResult && storingResult.versionId) {
responseHeaders['x-amz-version-id'] = versionIdUtils.encode(storingResult.versionId,
config.versionIdEncodingType);
}
}
// Only pre-existing non-versioned objects get 0 all others use 1
const numberOfObjects = !isVersionedObj && oldByteLength !== null ? 0 : 1;
// only the bucket owner's metrics should be updated, regardless of
// who the requester is
pushMetric('putObject', log, {
authInfo,
canonicalID: bucket.getOwner(),
bucket: bucketName,
keys: [objectKey],
newByteLength,
oldByteLength: isVersionedObj ? null : oldByteLength,
versionId: isVersionedObj && storingResult ? storingResult.versionId : undefined,
location: bucket.getLocationConstraint(),
numberOfObjects,
});
monitoring.promMetrics('PUT', bucketName, '200',
'putObject', newByteLength, oldByteLength, isVersionedObj,
null, ingestSize);
return callback(null, responseHeaders);
}); });
monitoring.promMetrics('PUT', bucketName, '200',
'putObject', newByteLength, oldByteLength, isVersionedObj,
null, ingestSize);
return callback(null, responseHeaders);
}); });
});
} }
module.exports = objectPut; module.exports = objectPut;
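
The metrics block at the end of objectPut is easier to follow in isolation: Utapi wants `null` for a brand-new object and the previous content-length for an overwrite, `numberOfObjects` only stays at 0 when a pre-existing non-versioned object is overwritten, and the stored versionId is encoded before it is exposed as `x-amz-version-id`. A small sketch of those derivations, with the inputs passed in to mirror the handler:

// Hedged sketch of the derived values used above; inputs mirror the handler.
function buildPutObjectMetrics(objMD, storingResult, bucket, config, versionIdUtils, responseHeaders) {
    // null tells Utapi this is a brand-new object; a number means overwrite.
    const oldByteLength = objMD && objMD['content-length'] !== undefined
        ? objMD['content-length'] : null;
    const vcfg = bucket.getVersioningConfiguration();
    const isVersionedObj = vcfg && vcfg.Status === 'Enabled';
    // Only an overwrite of a pre-existing, non-versioned object counts as 0 new objects.
    const numberOfObjects = !isVersionedObj && oldByteLength !== null ? 0 : 1;
    if (isVersionedObj && storingResult && storingResult.versionId) {
        responseHeaders['x-amz-version-id'] = versionIdUtils.encode(
            storingResult.versionId, config.versionIdEncodingType);
    }
    return { oldByteLength, isVersionedObj, numberOfObjects };
}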

View File

@ -7,8 +7,7 @@ const { pushMetric } = require('../utapi/utilities');
const collectCorsHeaders = require('../utilities/collectCorsHeaders'); const collectCorsHeaders = require('../utilities/collectCorsHeaders');
const constants = require('../../constants'); const constants = require('../../constants');
const vault = require('../auth/vault'); const vault = require('../auth/vault');
const { decodeVersionId, getVersionIdResHeader } const { decodeVersionId, getVersionIdResHeader } = require('./apiUtils/object/versioning');
= require('./apiUtils/object/versioning');
const { metadataValidateBucketAndObj } = require('../metadata/metadataUtils'); const { metadataValidateBucketAndObj } = require('../metadata/metadataUtils');
const monitoring = require('../utilities/metrics'); const monitoring = require('../utilities/metrics');
const { config } = require('../Config'); const { config } = require('../Config');
@ -45,8 +44,8 @@ const { config } = require('../Config');
*/ */
function objectPutACL(authInfo, request, log, cb) { function objectPutACL(authInfo, request, log, cb) {
log.debug('processing request', { method: 'objectPutACL' }); log.debug('processing request', { method: 'objectPutACL' });
const bucketName = request.bucketName; const { bucketName } = request;
const objectKey = request.objectKey; const { objectKey } = request;
const newCannedACL = request.headers['x-amz-acl']; const newCannedACL = request.headers['x-amz-acl'];
const possibleCannedACL = [ const possibleCannedACL = [
'private', 'private',
@ -88,7 +87,7 @@ function objectPutACL(authInfo, request, log, cb) {
objectKey, objectKey,
versionId: reqVersionId, versionId: reqVersionId,
getDeleteMarker: true, getDeleteMarker: true,
requestType: 'objectPutACL', requestType: request.apiMethods || 'objectPutACL',
}; };
const possibleGrants = ['FULL_CONTROL', 'WRITE_ACP', 'READ', 'READ_ACP']; const possibleGrants = ['FULL_CONTROL', 'WRITE_ACP', 'READ', 'READ_ACP'];
@ -100,31 +99,31 @@ function objectPutACL(authInfo, request, log, cb) {
READ_ACP: [], READ_ACP: [],
}; };
const grantReadHeader = const grantReadHeader = aclUtils.parseGrant(request.headers['x-amz-grant-read'], 'READ');
aclUtils.parseGrant(request.headers['x-amz-grant-read'], 'READ'); const grantReadACPHeader = aclUtils.parseGrant(request.headers['x-amz-grant-read-acp'],
const grantReadACPHeader = 'READ_ACP');
aclUtils.parseGrant(request.headers['x-amz-grant-read-acp'],
'READ_ACP');
const grantWriteACPHeader = aclUtils.parseGrant( const grantWriteACPHeader = aclUtils.parseGrant(
request.headers['x-amz-grant-write-acp'], 'WRITE_ACP'); request.headers['x-amz-grant-write-acp'], 'WRITE_ACP',
);
const grantFullControlHeader = aclUtils.parseGrant( const grantFullControlHeader = aclUtils.parseGrant(
request.headers['x-amz-grant-full-control'], 'FULL_CONTROL'); request.headers['x-amz-grant-full-control'], 'FULL_CONTROL',
);
return async.waterfall([ return async.waterfall([
function validateBucketAndObj(next) { function validateBucketAndObj(next) {
return metadataValidateBucketAndObj(metadataValParams, log, return metadataValidateBucketAndObj(metadataValParams, request.iamAuthzResults, log,
(err, bucket, objectMD) => { (err, bucket, objectMD) => {
if (err) { if (err) {
return next(err); return next(err);
} }
if (!objectMD) { if (!objectMD) {
const err = reqVersionId ? errors.NoSuchVersion : const err = reqVersionId ? errors.NoSuchVersion
errors.NoSuchKey; : errors.NoSuchKey;
return next(err, bucket); return next(err, bucket);
} }
if (objectMD.isDeleteMarker) { if (objectMD.isDeleteMarker) {
log.trace('delete marker detected', log.trace('delete marker detected',
{ method: 'objectPutACL' }); { method: 'objectPutACL' });
// FIXME we should return a `x-amz-delete-marker: true` header, // FIXME we should return a `x-amz-delete-marker: true` header,
// see S3C-7592 // see S3C-7592
return next(errors.MethodNotAllowed, bucket); return next(errors.MethodNotAllowed, bucket);
@ -208,7 +207,7 @@ function objectPutACL(authInfo, request, log, cb) {
if (!skip && granteeType === 'Group') { if (!skip && granteeType === 'Group') {
if (possibleGroups.indexOf(grantee.URI[0]) < 0) { if (possibleGroups.indexOf(grantee.URI[0]) < 0) {
log.trace('invalid user group', log.trace('invalid user group',
{ userGroup: grantee.URI[0] }); { userGroup: grantee.URI[0] });
return next(errors.InvalidArgument, bucket); return next(errors.InvalidArgument, bucket);
} }
return usersIdentifiedByGroup.push({ return usersIdentifiedByGroup.push({
@ -222,22 +221,24 @@ function objectPutACL(authInfo, request, log, cb) {
} else { } else {
// If no canned ACL and no parsed xml, loop // If no canned ACL and no parsed xml, loop
// through the access headers // through the access headers
const allGrantHeaders = const allGrantHeaders = [].concat(grantReadHeader,
[].concat(grantReadHeader,
grantReadACPHeader, grantWriteACPHeader, grantReadACPHeader, grantWriteACPHeader,
grantFullControlHeader); grantFullControlHeader);
usersIdentifiedByEmail = allGrantHeaders.filter(item => usersIdentifiedByEmail = allGrantHeaders.filter(item => item
item && item.userIDType.toLowerCase() === 'emailaddress'); && item.userIDType.toLowerCase() === 'emailaddress');
usersIdentifiedByGroup = allGrantHeaders usersIdentifiedByGroup = allGrantHeaders
.filter(itm => itm && itm.userIDType .filter(itm => itm && itm.userIDType
.toLowerCase() === 'uri'); .toLowerCase() === 'uri');
for (let i = 0; i < usersIdentifiedByGroup.length; i++) { for (let i = 0; i < usersIdentifiedByGroup.length; i += 1) {
if (possibleGroups.indexOf( if (possibleGroups.indexOf(
usersIdentifiedByGroup[i].identifier) < 0) { usersIdentifiedByGroup[i].identifier,
) < 0) {
log.trace('invalid user group', log.trace('invalid user group',
{ userGroup: usersIdentifiedByGroup[i] {
.identifier }); userGroup: usersIdentifiedByGroup[i]
.identifier,
});
return next(errors.InvalidArgument, bucket); return next(errors.InvalidArgument, bucket);
} }
} }
@ -265,18 +266,20 @@ function objectPutACL(authInfo, request, log, cb) {
const allUsers = [].concat( const allUsers = [].concat(
reconstructedUsersIdentifiedByEmail, reconstructedUsersIdentifiedByEmail,
usersIdentifiedByID, usersIdentifiedByID,
usersIdentifiedByGroup); usersIdentifiedByGroup,
);
const revisedAddACLParams = aclUtils const revisedAddACLParams = aclUtils
.sortHeaderGrants(allUsers, addACLParams); .sortHeaderGrants(allUsers, addACLParams);
return next(null, bucket, objectMD, return next(null, bucket, objectMD,
revisedAddACLParams); revisedAddACLParams);
}); },
);
} }
const allUsers = [].concat( const allUsers = [].concat(
usersIdentifiedByID, usersIdentifiedByID,
usersIdentifiedByGroup); usersIdentifiedByGroup,
const revisedAddACLParams = );
aclUtils.sortHeaderGrants(allUsers, addACLParams); const revisedAddACLParams = aclUtils.sortHeaderGrants(allUsers, addACLParams);
return next(null, bucket, objectMD, revisedAddACLParams); return next(null, bucket, objectMD, revisedAddACLParams);
}, },
function addAclsToObjMD(bucket, objectMD, ACLParams, next) { function addAclsToObjMD(bucket, objectMD, ACLParams, next) {
@ -300,13 +303,13 @@ function objectPutACL(authInfo, request, log, cb) {
method: 'objectPutACL', method: 'objectPutACL',
}); });
monitoring.promMetrics( monitoring.promMetrics(
'PUT', bucketName, err.code, 'putObjectAcl'); 'PUT', bucketName, err.code, 'putObjectAcl',
);
return cb(err, resHeaders); return cb(err, resHeaders);
} }
const verCfg = bucket.getVersioningConfiguration(); const verCfg = bucket.getVersioningConfiguration();
resHeaders['x-amz-version-id'] = resHeaders['x-amz-version-id'] = getVersionIdResHeader(verCfg, objectMD);
getVersionIdResHeader(verCfg, objectMD);
log.trace('processed request successfully in object put acl api'); log.trace('processed request successfully in object put acl api');
pushMetric('putObjectAcl', log, { pushMetric('putObjectAcl', log, {
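
When no canned ACL and no XML body are supplied, objectPutACL builds its grant list from the x-amz-grant-* headers. A condensed sketch of that path, assuming `aclUtils.parseGrant` returns entries with `userIDType` and `identifier`, as the filtering code above implies:

// Hedged sketch of the header-driven grant path shown above.
function collectGrantHeaders(aclUtils, request) {
    const allGrantHeaders = [].concat(
        aclUtils.parseGrant(request.headers['x-amz-grant-read'], 'READ'),
        aclUtils.parseGrant(request.headers['x-amz-grant-read-acp'], 'READ_ACP'),
        aclUtils.parseGrant(request.headers['x-amz-grant-write-acp'], 'WRITE_ACP'),
        aclUtils.parseGrant(request.headers['x-amz-grant-full-control'], 'FULL_CONTROL'));
    return {
        // E-mail grantees still need a Vault lookup to resolve canonical IDs.
        usersIdentifiedByEmail: allGrantHeaders.filter(item => item
            && item.userIDType.toLowerCase() === 'emailaddress'),
        // Group grantees are later validated against the allowed group URIs.
        usersIdentifiedByGroup: allGrantHeaders.filter(item => item
            && item.userIDType.toLowerCase() === 'uri'),
    };
}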

View File

@ -1,12 +1,12 @@
const async = require('async'); const async = require('async');
const { errors, versioning, s3middleware } = require('arsenal'); const { errors, versioning, s3middleware } = require('arsenal');
const validateHeaders = s3middleware.validateConditionalHeaders; const validateHeaders = s3middleware.validateConditionalHeaders;
const collectCorsHeaders = require('../utilities/collectCorsHeaders'); const collectCorsHeaders = require('../utilities/collectCorsHeaders');
const constants = require('../../constants'); const constants = require('../../constants');
const { data } = require('../data/wrapper'); const { data } = require('../data/wrapper');
const locationConstraintCheck = const locationConstraintCheck = require('./apiUtils/object/locationConstraintCheck');
require('./apiUtils/object/locationConstraintCheck');
const metadata = require('../metadata/wrapper'); const metadata = require('../metadata/wrapper');
const { pushMetric } = require('../utapi/utilities'); const { pushMetric } = require('../utapi/utilities');
const logger = require('../utilities/logger'); const logger = require('../utilities/logger');
@ -62,8 +62,7 @@ function objectPutCopyPart(authInfo, request, sourceBucket,
// Note that keys in the query object retain their case, so // Note that keys in the query object retain their case, so
// request.query.uploadId must be called with that exact // request.query.uploadId must be called with that exact
// capitalization // capitalization
const uploadId = request.query.uploadId; const { uploadId } = request.query;
const valPutParams = { const valPutParams = {
authInfo, authInfo,
bucketName: destBucketName, bucketName: destBucketName,
@ -93,26 +92,26 @@ function objectPutCopyPart(authInfo, request, sourceBucket,
return async.waterfall([ return async.waterfall([
function checkDestAuth(next) { function checkDestAuth(next) {
return metadataValidateBucketAndObj(valPutParams, log, return metadataValidateBucketAndObj(valPutParams, request.iamAuthzResults, log,
(err, destBucketMD) => { (err, destBucketMD) => {
if (err) { if (err) {
log.debug('error validating authorization for ' + log.debug('error validating authorization for '
'destination bucket', + 'destination bucket',
{ error: err }); { error: err });
return next(err, destBucketMD); return next(err, destBucketMD);
} }
const flag = destBucketMD.hasDeletedFlag() const flag = destBucketMD.hasDeletedFlag()
|| destBucketMD.hasTransientFlag(); || destBucketMD.hasTransientFlag();
if (flag) { if (flag) {
log.trace('deleted flag or transient flag ' + log.trace('deleted flag or transient flag '
'on destination bucket', { flag }); + 'on destination bucket', { flag });
return next(errors.NoSuchBucket); return next(errors.NoSuchBucket);
} }
return next(null, destBucketMD); return next(null, destBucketMD);
}); });
}, },
function checkSourceAuthorization(destBucketMD, next) { function checkSourceAuthorization(destBucketMD, next) {
return metadataValidateBucketAndObj(valGetParams, log, return metadataValidateBucketAndObj(valGetParams, request.iamAuthzResults, log,
(err, sourceBucketMD, sourceObjMD) => { (err, sourceBucketMD, sourceObjMD) => {
if (err) { if (err) {
log.debug('error validating get part of request', log.debug('error validating get part of request',
@ -121,28 +120,26 @@ function objectPutCopyPart(authInfo, request, sourceBucket,
} }
if (!sourceObjMD) { if (!sourceObjMD) {
log.debug('no source object', { sourceObject }); log.debug('no source object', { sourceObject });
const err = reqVersionId ? errors.NoSuchVersion : const err = reqVersionId ? errors.NoSuchVersion
errors.NoSuchKey; : errors.NoSuchKey;
return next(err, destBucketMD); return next(err, destBucketMD);
} }
let sourceLocationConstraintName = let sourceLocationConstraintName = sourceObjMD.dataStoreName;
sourceObjMD.dataStoreName;
// for backwards compatibility before storing dataStoreName // for backwards compatibility before storing dataStoreName
// TODO: handle in objectMD class // TODO: handle in objectMD class
if (!sourceLocationConstraintName && if (!sourceLocationConstraintName
sourceObjMD.location[0] && && sourceObjMD.location[0]
sourceObjMD.location[0].dataStoreName) { && sourceObjMD.location[0].dataStoreName) {
sourceLocationConstraintName = sourceLocationConstraintName = sourceObjMD.location[0].dataStoreName;
sourceObjMD.location[0].dataStoreName;
} }
if (sourceObjMD.isDeleteMarker) { if (sourceObjMD.isDeleteMarker) {
log.debug('delete marker on source object', log.debug('delete marker on source object',
{ sourceObject }); { sourceObject });
if (reqVersionId) { if (reqVersionId) {
const err = errors.InvalidRequest const err = errors.InvalidRequest
.customizeDescription('The source of a copy ' + .customizeDescription('The source of a copy '
'request may not specifically refer to a delete' + + 'request may not specifically refer to a delete'
'marker by version id.'); + 'marker by version id.');
return next(err, destBucketMD); return next(err, destBucketMD);
} }
// if user specifies a key in a versioned source bucket // if user specifies a key in a versioned source bucket
@ -150,8 +147,7 @@ function objectPutCopyPart(authInfo, request, sourceBucket,
// delete marker, return NoSuchKey // delete marker, return NoSuchKey
return next(errors.NoSuchKey, destBucketMD); return next(errors.NoSuchKey, destBucketMD);
} }
const headerValResult = const headerValResult = validateHeaders(request.headers,
validateHeaders(request.headers,
sourceObjMD['last-modified'], sourceObjMD['last-modified'],
sourceObjMD['content-md5']); sourceObjMD['content-md5']);
if (headerValResult.error) { if (headerValResult.error) {
@ -166,15 +162,15 @@ function objectPutCopyPart(authInfo, request, sourceBucket,
// If specific version requested, include copy source // If specific version requested, include copy source
// version id in response. Include in request by default // version id in response. Include in request by default
// if versioning is enabled or suspended. // if versioning is enabled or suspended.
if (sourceBucketMD.getVersioningConfiguration() || if (sourceBucketMD.getVersioningConfiguration()
reqVersionId) { || reqVersionId) {
if (sourceObjMD.isNull || !sourceObjMD.versionId) { if (sourceObjMD.isNull || !sourceObjMD.versionId) {
sourceVerId = 'null'; sourceVerId = 'null';
} else { } else {
sourceVerId = sourceVerId = versionIdUtils.encode(
versionIdUtils.encode( sourceObjMD.versionId,
sourceObjMD.versionId, config.versionIdEncodingType,
config.versionIdEncodingType); );
} }
} }
return next(null, copyLocator.dataLocator, destBucketMD, return next(null, copyLocator.dataLocator, destBucketMD,
@ -199,7 +195,7 @@ function objectPutCopyPart(authInfo, request, sourceBucket,
}); });
return next(err); return next(err);
} }
let splitter = constants.splitter; let { splitter } = constants;
if (mpuBucket.getMdBucketModelVersion() < 2) { if (mpuBucket.getMdBucketModelVersion() < 2) {
splitter = constants.oldSplitter; splitter = constants.oldSplitter;
} }
@ -213,35 +209,33 @@ function objectPutCopyPart(authInfo, request, sourceBucket,
function getMpuOverviewObject(dataLocator, destBucketMD, function getMpuOverviewObject(dataLocator, destBucketMD,
copyObjectSize, sourceVerId, splitter, copyObjectSize, sourceVerId, splitter,
sourceLocationConstraintName, next) { sourceLocationConstraintName, next) {
const mpuOverviewKey = const mpuOverviewKey = `overview${splitter}${destObjectKey}${splitter}${uploadId}`;
`overview${splitter}${destObjectKey}${splitter}${uploadId}`;
return metadata.getObjectMD(mpuBucketName, mpuOverviewKey, return metadata.getObjectMD(mpuBucketName, mpuOverviewKey,
null, log, (err, res) => { null, log, (err, res) => {
if (err) { if (err) {
// TODO: move to `.is` once BKTCLT-9 is done and bumped in Cloudserver // TODO: move to `.is` once BKTCLT-9 is done and bumped in Cloudserver
if (err.NoSuchKey) { if (err.NoSuchKey) {
return next(errors.NoSuchUpload); return next(errors.NoSuchUpload);
}
log.error('error getting overview object from ' +
'mpu bucket', {
error: err,
method: 'objectPutCopyPart::' +
'metadata.getObjectMD',
});
return next(err);
} }
const initiatorID = res.initiator.ID; log.error('error getting overview object from '
const requesterID = authInfo.isRequesterAnIAMUser() ? + 'mpu bucket', {
authInfo.getArn() : authInfo.getCanonicalID(); error: err,
if (initiatorID !== requesterID) { method: 'objectPutCopyPart::'
return next(errors.AccessDenied); + 'metadata.getObjectMD',
} });
const destObjLocationConstraint = return next(err);
res.controllingLocationConstraint; }
return next(null, dataLocator, destBucketMD, const initiatorID = res.initiator.ID;
destObjLocationConstraint, copyObjectSize, const requesterID = authInfo.isRequesterAnIAMUser()
sourceVerId, sourceLocationConstraintName, splitter); ? authInfo.getArn() : authInfo.getCanonicalID();
}); if (initiatorID !== requesterID) {
return next(errors.AccessDenied);
}
const destObjLocationConstraint = res.controllingLocationConstraint;
return next(null, dataLocator, destBucketMD,
destObjLocationConstraint, copyObjectSize,
sourceVerId, sourceLocationConstraintName, splitter);
});
}, },
function goGetData( function goGetData(
dataLocator, dataLocator,
@ -253,6 +247,9 @@ function objectPutCopyPart(authInfo, request, sourceBucket,
splitter, splitter,
next, next,
) { ) {
const originalIdentityAuthzResults = request.iamAuthzResults;
// eslint-disable-next-line no-param-reassign
delete request.iamAuthzResults;
data.uploadPartCopy( data.uploadPartCopy(
request, request,
log, log,
@ -263,31 +260,33 @@ function objectPutCopyPart(authInfo, request, sourceBucket,
dataStoreContext, dataStoreContext,
locationConstraintCheck, locationConstraintCheck,
(error, eTag, lastModified, serverSideEncryption, locations) => { (error, eTag, lastModified, serverSideEncryption, locations) => {
// eslint-disable-next-line no-param-reassign
request.iamAuthzResults = originalIdentityAuthzResults;
if (error) { if (error) {
if (error.message === 'skip') { if (error.message === 'skip') {
return next(skipError, destBucketMD, eTag, return next(skipError, destBucketMD, eTag,
lastModified, sourceVerId, lastModified, sourceVerId,
serverSideEncryption); serverSideEncryption);
} }
return next(error, destBucketMD); return next(error, destBucketMD);
} }
return next(null, destBucketMD, locations, eTag, return next(null, destBucketMD, locations, eTag,
copyObjectSize, sourceVerId, serverSideEncryption, copyObjectSize, sourceVerId, serverSideEncryption,
lastModified, splitter); lastModified, splitter);
}); },
);
}, },
function getExistingPartInfo(destBucketMD, locations, totalHash, function getExistingPartInfo(destBucketMD, locations, totalHash,
copyObjectSize, sourceVerId, serverSideEncryption, lastModified, copyObjectSize, sourceVerId, serverSideEncryption, lastModified,
splitter, next) { splitter, next) {
const partKey = const partKey = `${uploadId}${constants.splitter}${paddedPartNumber}`;
`${uploadId}${constants.splitter}${paddedPartNumber}`;
metadata.getObjectMD(mpuBucketName, partKey, {}, log, metadata.getObjectMD(mpuBucketName, partKey, {}, log,
(err, result) => { (err, result) => {
// If there is nothing being overwritten just move on // If there is nothing being overwritten just move on
// TODO: move to `.is` once BKTCLT-9 is done and bumped in Cloudserver // TODO: move to `.is` once BKTCLT-9 is done and bumped in Cloudserver
if (err && !err.NoSuchKey) { if (err && !err.NoSuchKey) {
log.debug('error getting current part (if any)', log.debug('error getting current part (if any)',
{ error: err }); { error: err });
return next(err); return next(err);
} }
let oldLocations; let oldLocations;
@ -298,8 +297,8 @@ function objectPutCopyPart(authInfo, request, sourceBucket,
// Pull locations to clean up any potential orphans // Pull locations to clean up any potential orphans
// in data if object put is an overwrite of // in data if object put is an overwrite of
// already existing object with same key and part number // already existing object with same key and part number
oldLocations = Array.isArray(oldLocations) ? oldLocations = Array.isArray(oldLocations)
oldLocations : [oldLocations]; ? oldLocations : [oldLocations];
} }
return next(null, destBucketMD, locations, totalHash, return next(null, destBucketMD, locations, totalHash,
prevObjectSize, copyObjectSize, sourceVerId, prevObjectSize, copyObjectSize, sourceVerId,
@ -321,7 +320,7 @@ function objectPutCopyPart(authInfo, request, sourceBucket,
locations, metaStoreParams, log, err => { locations, metaStoreParams, log, err => {
if (err) { if (err) {
log.debug('error storing new metadata', log.debug('error storing new metadata',
{ error: err, method: 'storeNewPartMetadata' }); { error: err, method: 'storeNewPartMetadata' });
return next(err); return next(err);
} }
return next(null, locations, oldLocations, destBucketMD, totalHash, return next(null, locations, oldLocations, destBucketMD, totalHash,
@ -374,7 +373,8 @@ function objectPutCopyPart(authInfo, request, sourceBucket,
// data locations) has been stored // data locations) has been stored
if (oldLocationsToDelete) { if (oldLocationsToDelete) {
const delLog = logger.newRequestLoggerFromSerializedUids( const delLog = logger.newRequestLoggerFromSerializedUids(
log.getSerializedUids()); log.getSerializedUids(),
);
return data.batchDelete(oldLocationsToDelete, request.method, null, return data.batchDelete(oldLocationsToDelete, request.method, null,
delLog, err => { delLog, err => {
if (err) { if (err) {
@ -399,7 +399,7 @@ function objectPutCopyPart(authInfo, request, sourceBucket,
request.method, destBucketMD); request.method, destBucketMD);
if (err && err !== skipError) { if (err && err !== skipError) {
log.trace('error from copy part waterfall', log.trace('error from copy part waterfall',
{ error: err }); { error: err });
monitoring.promMetrics('PUT', destBucketName, err.code, monitoring.promMetrics('PUT', destBucketName, err.code,
'putObjectCopyPart'); 'putObjectCopyPart');
return callback(err, null, corsHeaders); return callback(err, null, corsHeaders);
@ -415,11 +415,9 @@ function objectPutCopyPart(authInfo, request, sourceBucket,
const additionalHeaders = corsHeaders || {}; const additionalHeaders = corsHeaders || {};
if (serverSideEncryption) { if (serverSideEncryption) {
additionalHeaders['x-amz-server-side-encryption'] = additionalHeaders['x-amz-server-side-encryption'] = serverSideEncryption.algorithm;
serverSideEncryption.algorithm;
if (serverSideEncryption.algorithm === 'aws:kms') { if (serverSideEncryption.algorithm === 'aws:kms') {
additionalHeaders['x-amz-server-side-encryption-aws-kms-key-id'] additionalHeaders['x-amz-server-side-encryption-aws-kms-key-id'] = serverSideEncryption.masterKeyId;
= serverSideEncryption.masterKeyId;
} }
} }
additionalHeaders['x-amz-copy-source-version-id'] = sourceVerId; additionalHeaders['x-amz-copy-source-version-id'] = sourceVerId;
@ -433,7 +431,8 @@ function objectPutCopyPart(authInfo, request, sourceBucket,
location: destBucketMD.getLocationConstraint(), location: destBucketMD.getLocationConstraint(),
}); });
monitoring.promMetrics( monitoring.promMetrics(
'PUT', destBucketName, '200', 'putObjectCopyPart'); 'PUT', destBucketName, '200', 'putObjectCopyPart',
);
return callback(null, xml, additionalHeaders); return callback(null, xml, additionalHeaders);
}); });
} }
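
objectPutCopyPart introduces a bracketing pattern that also appears in objectPutPart below: `request.iamAuthzResults` is stashed and deleted before the raw request object is handed to the data layer, then restored in the callback, so external backends never see the extra property. A minimal sketch of that idea as a self-contained helper; `delegateToDataBackend` is a hypothetical stand-in for calls such as `data.uploadPartCopy`:

// Hedged sketch: hide the authorization map while a data backend holds the
// request object, and restore it once the backend returns.
function withAuthzResultsHidden(request, delegateToDataBackend, done) {
    const originalIdentityAuthzResults = request.iamAuthzResults;
    // eslint-disable-next-line no-param-reassign
    delete request.iamAuthzResults;
    return delegateToDataBackend(request, (err, ...results) => {
        // eslint-disable-next-line no-param-reassign
        request.iamAuthzResults = originalIdentityAuthzResults;
        return done(err, ...results);
    });
}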

View File

@ -43,41 +43,41 @@ function objectPutLegalHold(authInfo, request, log, callback) {
objectKey, objectKey,
versionId, versionId,
getDeleteMarker: true, getDeleteMarker: true,
requestType: 'objectPutLegalHold', requestType: request.apiMethods || 'objectPutLegalHold',
request, request,
}; };
return async.waterfall([ return async.waterfall([
next => metadataValidateBucketAndObj(metadataValParams, log, next => metadataValidateBucketAndObj(metadataValParams, request.iamAuthzResults, log,
(err, bucket, objectMD) => { (err, bucket, objectMD) => {
if (err) { if (err) {
log.trace('request authorization failed', log.trace('request authorization failed',
{ method: 'objectPutLegalHold', error: err }); { method: 'objectPutLegalHold', error: err });
return next(err); return next(err);
} }
if (!objectMD) { if (!objectMD) {
const err = versionId ? errors.NoSuchVersion : const err = versionId ? errors.NoSuchVersion :
errors.NoSuchKey; errors.NoSuchKey;
log.trace('error no object metadata found', log.trace('error no object metadata found',
{ method: 'objectPutLegalHold', error: err }); { method: 'objectPutLegalHold', error: err });
return next(err, bucket); return next(err, bucket);
} }
if (objectMD.isDeleteMarker) { if (objectMD.isDeleteMarker) {
log.trace('version is a delete marker', log.trace('version is a delete marker',
{ method: 'objectPutLegalHold' }); { method: 'objectPutLegalHold' });
// FIXME we should return a `x-amz-delete-marker: true` header, // FIXME we should return a `x-amz-delete-marker: true` header,
// see S3C-7592 // see S3C-7592
return next(errors.MethodNotAllowed, bucket); return next(errors.MethodNotAllowed, bucket);
} }
if (!bucket.isObjectLockEnabled()) { if (!bucket.isObjectLockEnabled()) {
log.trace('object lock not enabled on bucket', log.trace('object lock not enabled on bucket',
{ method: 'objectPutLegalHold' }); { method: 'objectPutLegalHold' });
return next(errors.InvalidRequest.customizeDescription( return next(errors.InvalidRequest.customizeDescription(
'Bucket is missing Object Lock Configuration' 'Bucket is missing Object Lock Configuration'
), bucket); ), bucket);
} }
return next(null, bucket, objectMD); return next(null, bucket, objectMD);
}), }),
(bucket, objectMD, next) => { (bucket, objectMD, next) => {
log.trace('parsing legal hold'); log.trace('parsing legal hold');
parseLegalHoldXml(request.post, log, (err, res) => parseLegalHoldXml(request.post, log, (err, res) =>

View File

@ -94,6 +94,7 @@ function objectPutPart(authInfo, request, streamingV4Params, log,
const uploadId = request.query.uploadId; const uploadId = request.query.uploadId;
const mpuBucketName = `${constants.mpuBucketPrefix}${bucketName}`; const mpuBucketName = `${constants.mpuBucketPrefix}${bucketName}`;
const objectKey = request.objectKey; const objectKey = request.objectKey;
const originalIdentityAuthzResults = request.iamAuthzResults;
return async.waterfall([ return async.waterfall([
// Get the destination bucket. // Get the destination bucket.
@ -116,7 +117,8 @@ function objectPutPart(authInfo, request, streamingV4Params, log,
// For validating the request at the destinationBucket level the // For validating the request at the destinationBucket level the
// `requestType` is the general 'objectPut'. // `requestType` is the general 'objectPut'.
const requestType = 'objectPut'; const requestType = 'objectPut';
if (!isBucketAuthorized(destinationBucket, requestType, canonicalID, authInfo, log, request)) { if (!isBucketAuthorized(destinationBucket, request.apiMethods || requestType, canonicalID, authInfo,
request.iamAuthzResults, log, request)) {
log.debug('access denied for user on bucket', { requestType }); log.debug('access denied for user on bucket', { requestType });
return next(errors.AccessDenied, destinationBucket); return next(errors.AccessDenied, destinationBucket);
} }
@ -133,8 +135,8 @@ function objectPutPart(authInfo, request, streamingV4Params, log,
if (err) { if (err) {
log.error('error processing the cipher bundle for ' + log.error('error processing the cipher bundle for ' +
'the destination bucket', { 'the destination bucket', {
error: err, error: err,
}); });
return next(err, destinationBucket); return next(err, destinationBucket);
} }
return next(null, destinationBucket, res); return next(null, destinationBucket, res);
@ -146,24 +148,24 @@ function objectPutPart(authInfo, request, streamingV4Params, log,
// Get the MPU shadow bucket. // Get the MPU shadow bucket.
(destinationBucket, cipherBundle, next) => (destinationBucket, cipherBundle, next) =>
metadata.getBucket(mpuBucketName, log, metadata.getBucket(mpuBucketName, log,
(err, mpuBucket) => { (err, mpuBucket) => {
if (err && err.is.NoSuchBucket) { if (err && err.is.NoSuchBucket) {
return next(errors.NoSuchUpload, destinationBucket); return next(errors.NoSuchUpload, destinationBucket);
} }
if (err) { if (err) {
log.error('error getting the shadow mpu bucket', { log.error('error getting the shadow mpu bucket', {
error: err, error: err,
method: 'objectPutPart::metadata.getBucket', method: 'objectPutPart::metadata.getBucket',
}); });
return next(err, destinationBucket); return next(err, destinationBucket);
} }
let splitter = constants.splitter; let splitter = constants.splitter;
// BACKWARD: Remove to remove the old splitter // BACKWARD: Remove to remove the old splitter
if (mpuBucket.getMdBucketModelVersion() < 2) { if (mpuBucket.getMdBucketModelVersion() < 2) {
splitter = constants.oldSplitter; splitter = constants.oldSplitter;
} }
return next(null, destinationBucket, cipherBundle, splitter); return next(null, destinationBucket, cipherBundle, splitter);
}), }),
// Check authorization of the MPU shadow bucket. // Check authorization of the MPU shadow bucket.
(destinationBucket, cipherBundle, splitter, next) => { (destinationBucket, cipherBundle, splitter, next) => {
const mpuOverviewKey = _getOverviewKey(splitter, objectKey, const mpuOverviewKey = _getOverviewKey(splitter, objectKey,
@ -194,7 +196,7 @@ function objectPutPart(authInfo, request, streamingV4Params, log,
// If data backend is backend that handles mpu (like real AWS), // If data backend is backend that handles mpu (like real AWS),
// no need to store part info in metadata // no need to store part info in metadata
(destinationBucket, objectLocationConstraint, cipherBundle, (destinationBucket, objectLocationConstraint, cipherBundle,
splitter, next) => { splitter, next) => {
const mpuInfo = { const mpuInfo = {
destinationBucket, destinationBucket,
size, size,
@ -203,24 +205,26 @@ function objectPutPart(authInfo, request, streamingV4Params, log,
partNumber, partNumber,
bucketName, bucketName,
}; };
// eslint-disable-next-line no-param-reassign
delete request.iamAuthzResults;
writeContinue(request, request._response); writeContinue(request, request._response);
return data.putPart(request, mpuInfo, streamingV4Params, return data.putPart(request, mpuInfo, streamingV4Params,
objectLocationConstraint, locationConstraintCheck, log, objectLocationConstraint, locationConstraintCheck, log,
(err, partInfo, updatedObjectLC) => { (err, partInfo, updatedObjectLC) => {
if (err) { if (err) {
return next(err, destinationBucket); return next(err, destinationBucket);
} }
// if data backend handles mpu, skip to end of waterfall // if data backend handles mpu, skip to end of waterfall
if (partInfo && partInfo.dataStoreType === 'aws_s3') { if (partInfo && partInfo.dataStoreType === 'aws_s3') {
return next(skipError, destinationBucket, return next(skipError, destinationBucket,
partInfo.dataStoreETag); partInfo.dataStoreETag);
} }
// partInfo will be null if data backend is not external // partInfo will be null if data backend is not external
// if the object location constraint undefined because // if the object location constraint undefined because
// mpu was initiated in legacy version, update it // mpu was initiated in legacy version, update it
return next(null, destinationBucket, updatedObjectLC, return next(null, destinationBucket, updatedObjectLC,
cipherBundle, splitter, partInfo); cipherBundle, splitter, partInfo);
}); });
}, },
// Get any pre-existing part. // Get any pre-existing part.
(destinationBucket, objectLocationConstraint, cipherBundle, (destinationBucket, objectLocationConstraint, cipherBundle,
@ -256,14 +260,14 @@ function objectPutPart(authInfo, request, streamingV4Params, log,
}, },
// Store in data backend. // Store in data backend.
(destinationBucket, objectLocationConstraint, cipherBundle, (destinationBucket, objectLocationConstraint, cipherBundle,
partKey, prevObjectSize, oldLocations, partInfo, splitter, next) => { partKey, prevObjectSize, oldLocations, partInfo, splitter, next) => {
// NOTE: set oldLocations to null so we do not batchDelete for now // NOTE: set oldLocations to null so we do not batchDelete for now
if (partInfo && partInfo.dataStoreType === 'azure') { if (partInfo && partInfo.dataStoreType === 'azure') {
// skip to storing metadata // skip to storing metadata
return next(null, destinationBucket, partInfo, return next(null, destinationBucket, partInfo,
partInfo.dataStoreETag, partInfo.dataStoreETag,
cipherBundle, partKey, prevObjectSize, null, cipherBundle, partKey, prevObjectSize, null,
objectLocationConstraint, splitter); objectLocationConstraint, splitter);
} }
const objectContext = { const objectContext = {
bucketName, bucketName,
@ -289,7 +293,7 @@ function objectPutPart(authInfo, request, streamingV4Params, log,
// Store data locations in metadata and delete any overwritten // Store data locations in metadata and delete any overwritten
// data if completeMPU hasn't been initiated yet. // data if completeMPU hasn't been initiated yet.
(destinationBucket, dataGetInfo, hexDigest, cipherBundle, partKey, (destinationBucket, dataGetInfo, hexDigest, cipherBundle, partKey,
prevObjectSize, oldLocations, objectLocationConstraint, splitter, next) => { prevObjectSize, oldLocations, objectLocationConstraint, splitter, next) => {
// Use an array to be consistent with objectPutCopyPart where there // Use an array to be consistent with objectPutCopyPart where there
// could be multiple locations. // could be multiple locations.
const partLocations = [dataGetInfo]; const partLocations = [dataGetInfo];
@ -324,7 +328,7 @@ function objectPutPart(authInfo, request, streamingV4Params, log,
}); });
}, },
(partLocations, oldLocations, objectLocationConstraint, destinationBucket, (partLocations, oldLocations, objectLocationConstraint, destinationBucket,
hexDigest, prevObjectSize, splitter, next) => { hexDigest, prevObjectSize, splitter, next) => {
if (!oldLocations) { if (!oldLocations) {
return next(null, oldLocations, objectLocationConstraint, return next(null, oldLocations, objectLocationConstraint,
destinationBucket, hexDigest, prevObjectSize); destinationBucket, hexDigest, prevObjectSize);
@ -385,6 +389,8 @@ function objectPutPart(authInfo, request, streamingV4Params, log,
], (err, destinationBucket, hexDigest, prevObjectSize) => { ], (err, destinationBucket, hexDigest, prevObjectSize) => {
const corsHeaders = collectCorsHeaders(request.headers.origin, const corsHeaders = collectCorsHeaders(request.headers.origin,
request.method, destinationBucket); request.method, destinationBucket);
// eslint-disable-next-line no-param-reassign
request.iamAuthzResults = originalIdentityAuthzResults;
if (err) { if (err) {
if (err === skipError) { if (err === skipError) {
return cb(null, hexDigest, corsHeaders); return cb(null, hexDigest, corsHeaders);
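
objectPutPart relies on the same sentinel-error trick as the copy-part path: when an external data backend (for example a real AWS S3 location) handles the multipart part itself, the waterfall is short-circuited with a dedicated `skipError` and the final callback turns that sentinel into a success. A reduced, runnable sketch of the idea with hypothetical step stubs:

// Hedged sketch of the skip-to-end pattern; the step functions are
// illustrative stubs, not the handler's real steps.
const async = require('async');

const skipError = new Error('skip');
const storePartInBackend = next => next(null, { dataStoreType: 'aws_s3', dataStoreETag: '"etag"' });
const storePartMetadata = (partInfo, next) => next(null, partInfo.dataStoreETag);

async.waterfall([
    next => storePartInBackend((err, partInfo) => {
        if (err) {
            return next(err);
        }
        if (partInfo && partInfo.dataStoreType === 'aws_s3') {
            // External backend already owns the part: skip the metadata step.
            return next(skipError, partInfo.dataStoreETag);
        }
        return next(null, partInfo);
    }),
    (partInfo, next) => storePartMetadata(partInfo, next),
], (err, eTag) => {
    if (err === skipError) {
        return console.log('part handled by backend', eTag);
    }
    return console.log(err || `stored part ${eTag}`);
});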

View File

@ -44,46 +44,56 @@ function objectPutRetention(authInfo, request, log, callback) {
objectKey, objectKey,
versionId: reqVersionId, versionId: reqVersionId,
getDeleteMarker: true, getDeleteMarker: true,
requestType: 'objectPutRetention', requestType: request.apiMethods || 'objectPutRetention',
request, request,
}; };
return async.waterfall([ return async.waterfall([
next => metadataValidateBucketAndObj(metadataValParams, log, next => {
(err, bucket, objectMD) => {
if (err) {
log.trace('request authorization failed',
{ method: 'objectPutRetention', error: err });
return next(err);
}
if (!objectMD) {
const err = reqVersionId ? errors.NoSuchVersion :
errors.NoSuchKey;
log.trace('error no object metadata found',
{ method: 'objectPutRetention', error: err });
return next(err, bucket);
}
if (objectMD.isDeleteMarker) {
log.trace('version is a delete marker',
{ method: 'objectPutRetention' });
// FIXME we should return a `x-amz-delete-marker: true` header,
// see S3C-7592
return next(errors.MethodNotAllowed, bucket);
}
if (!bucket.isObjectLockEnabled()) {
log.trace('object lock not enabled on bucket',
{ method: 'objectPutRetention' });
return next(errors.InvalidRequest.customizeDescription(
'Bucket is missing Object Lock Configuration'
), bucket);
}
return next(null, bucket, objectMD);
}),
(bucket, objectMD, next) => {
log.trace('parsing retention information'); log.trace('parsing retention information');
parseRetentionXml(request.post, log, parseRetentionXml(request.post, log,
(err, retentionInfo) => next(err, bucket, retentionInfo, objectMD)); (err, retentionInfo) => {
if (err) {
log.trace('error parsing retention information',
{ error: err });
return next(err);
}
const remainingDays = Math.ceil(
(new Date(retentionInfo.date) - Date.now()) / (1000 * 3600 * 24));
metadataValParams.request.objectLockRetentionDays = remainingDays;
return next(null, retentionInfo);
});
}, },
(retentionInfo, next) => metadataValidateBucketAndObj(metadataValParams, request.iamAuthzResults, log,
(err, bucket, objectMD) => {
if (err) {
log.trace('request authorization failed',
{ method: 'objectPutRetention', error: err });
return next(err);
}
if (!objectMD) {
const err = reqVersionId ? errors.NoSuchVersion :
errors.NoSuchKey;
log.trace('error no object metadata found',
{ method: 'objectPutRetention', error: err });
return next(err, bucket);
}
if (objectMD.isDeleteMarker) {
log.trace('version is a delete marker',
{ method: 'objectPutRetention' });
// FIXME we should return a `x-amz-delete-marker: true` header,
// see S3C-7592
return next(errors.MethodNotAllowed, bucket);
}
if (!bucket.isObjectLockEnabled()) {
log.trace('object lock not enabled on bucket',
{ method: 'objectPutRetention' });
return next(errors.InvalidRequest.customizeDescription(
'Bucket is missing Object Lock Configuration'
), bucket);
}
return next(null, bucket, retentionInfo, objectMD);
}),
(bucket, retentionInfo, objectMD, next) => { (bucket, retentionInfo, objectMD, next) => {
const hasGovernanceBypass = hasGovernanceBypassHeader(request.headers); const hasGovernanceBypass = hasGovernanceBypassHeader(request.headers);
if (hasGovernanceBypass && authInfo.isRequesterAnIAMUser()) { if (hasGovernanceBypass && authInfo.isRequesterAnIAMUser()) {
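
The reordering in objectPutRetention matters because the retention XML is now parsed before authorization, so the requested retention period can be attached to the request as `objectLockRetentionDays` and evaluated downstream. The day count is the rounded-up distance between the RetainUntilDate and now; a worked sketch with an illustrative date:

// Hedged sketch of the remaining-days computation used above.
const request = {}; // stands in for the normalized request object
const retentionInfo = {
    mode: 'GOVERNANCE',
    // Example: a retain-until date ~30.2 days out rounds up to 31 days.
    date: new Date(Date.now() + 30.2 * 24 * 3600 * 1000).toISOString(),
};
const remainingDays = Math.ceil(
    (new Date(retentionInfo.date) - Date.now()) / (1000 * 3600 * 24));
request.objectLockRetentionDays = remainingDays; // 31 in this example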

View File

@ -1,8 +1,7 @@
const async = require('async'); const async = require('async');
const { errors, s3middleware } = require('arsenal'); const { errors, s3middleware } = require('arsenal');
const { decodeVersionId, getVersionIdResHeader } = const { decodeVersionId, getVersionIdResHeader } = require('./apiUtils/object/versioning');
require('./apiUtils/object/versioning');
const { metadataValidateBucketAndObj } = require('../metadata/metadataUtils'); const { metadataValidateBucketAndObj } = require('../metadata/metadataUtils');
const { pushMetric } = require('../utapi/utilities'); const { pushMetric } = require('../utapi/utilities');
@ -11,8 +10,10 @@ const getReplicationInfo = require('./apiUtils/object/getReplicationInfo');
const collectCorsHeaders = require('../utilities/collectCorsHeaders'); const collectCorsHeaders = require('../utilities/collectCorsHeaders');
const metadata = require('../metadata/wrapper'); const metadata = require('../metadata/wrapper');
const { data } = require('../data/wrapper'); const { data } = require('../data/wrapper');
const { parseTagXml } = s3middleware.tagging; const { parseTagXml } = s3middleware.tagging;
const { config } = require('../Config'); const { config } = require('../Config');
const REPLICATION_ACTION = 'PUT_TAGGING'; const REPLICATION_ACTION = 'PUT_TAGGING';
/** /**
@ -26,8 +27,8 @@ const REPLICATION_ACTION = 'PUT_TAGGING';
function objectPutTagging(authInfo, request, log, callback) { function objectPutTagging(authInfo, request, log, callback) {
log.debug('processing request', { method: 'objectPutTagging' }); log.debug('processing request', { method: 'objectPutTagging' });
const bucketName = request.bucketName; const { bucketName } = request;
const objectKey = request.objectKey; const { objectKey } = request;
const decodedVidResult = decodeVersionId(request.query); const decodedVidResult = decodeVersionId(request.query);
if (decodedVidResult instanceof Error) { if (decodedVidResult instanceof Error) {
@ -45,38 +46,37 @@ function objectPutTagging(authInfo, request, log, callback) {
objectKey, objectKey,
versionId: reqVersionId, versionId: reqVersionId,
getDeleteMarker: true, getDeleteMarker: true,
requestType: 'objectPutTagging', requestType: request.apiMethods || 'objectPutTagging',
request, request,
}; };
return async.waterfall([ return async.waterfall([
next => metadataValidateBucketAndObj(metadataValParams, log, next => metadataValidateBucketAndObj(metadataValParams, request.iamAuthzResults, log,
(err, bucket, objectMD) => { (err, bucket, objectMD) => {
if (err) { if (err) {
log.trace('request authorization failed', log.trace('request authorization failed',
{ method: 'objectPutTagging', error: err }); { method: 'objectPutTagging', error: err });
return next(err); return next(err);
} }
if (!objectMD) { if (!objectMD) {
const err = reqVersionId ? errors.NoSuchVersion : const err = reqVersionId ? errors.NoSuchVersion
errors.NoSuchKey; : errors.NoSuchKey;
log.trace('error no object metadata found', log.trace('error no object metadata found',
{ method: 'objectPutTagging', error: err }); { method: 'objectPutTagging', error: err });
return next(err, bucket); return next(err, bucket);
} }
if (objectMD.isDeleteMarker) { if (objectMD.isDeleteMarker) {
log.trace('version is a delete marker', log.trace('version is a delete marker',
{ method: 'objectPutTagging' }); { method: 'objectPutTagging' });
// FIXME we should return a `x-amz-delete-marker: true` header, // FIXME we should return a `x-amz-delete-marker: true` header,
// see S3C-7592 // see S3C-7592
return next(errors.MethodNotAllowed, bucket); return next(errors.MethodNotAllowed, bucket);
} }
return next(null, bucket, objectMD); return next(null, bucket, objectMD);
}), }),
(bucket, objectMD, next) => { (bucket, objectMD, next) => {
log.trace('parsing tag(s)'); log.trace('parsing tag(s)');
parseTagXml(request.post, log, (err, tags) => parseTagXml(request.post, log, (err, tags) => next(err, bucket, tags, objectMD));
next(err, bucket, tags, objectMD));
}, },
(bucket, tags, objectMD, next) => { (bucket, tags, objectMD, next) => {
// eslint-disable-next-line no-param-reassign // eslint-disable-next-line no-param-reassign
@ -98,19 +98,19 @@ function objectPutTagging(authInfo, request, log, callback) {
// eslint-disable-next-line no-param-reassign // eslint-disable-next-line no-param-reassign
objectMD.originOp = 's3:ObjectTagging:Put'; objectMD.originOp = 's3:ObjectTagging:Put';
metadata.putObjectMD(bucket.getName(), objectKey, objectMD, params, metadata.putObjectMD(bucket.getName(), objectKey, objectMD, params,
log, err => log, err => next(err, bucket, objectMD));
next(err, bucket, objectMD));
}, },
(bucket, objectMD, next) => // if external backend handles tagging
// if external backend handles tagging (bucket, objectMD, next) => data.objectTagging('Put', objectKey, bucket, objectMD,
data.objectTagging('Put', objectKey, bucket, objectMD, log, err => next(err, bucket, objectMD)),
log, err => next(err, bucket, objectMD)),
], (err, bucket, objectMD) => { ], (err, bucket, objectMD) => {
const additionalResHeaders = collectCorsHeaders(request.headers.origin, const additionalResHeaders = collectCorsHeaders(request.headers.origin,
request.method, bucket); request.method, bucket);
if (err) { if (err) {
log.trace('error processing request', { error: err, log.trace('error processing request', {
method: 'objectPutTagging' }); error: err,
method: 'objectPutTagging',
});
monitoring.promMetrics('PUT', bucketName, err.code, monitoring.promMetrics('PUT', bucketName, err.code,
'putObjectTagging'); 'putObjectTagging');
} else { } else {
@ -122,10 +122,10 @@ function objectPutTagging(authInfo, request, log, callback) {
location: objectMD ? objectMD.dataStoreName : undefined, location: objectMD ? objectMD.dataStoreName : undefined,
}); });
monitoring.promMetrics( monitoring.promMetrics(
'PUT', bucketName, '200', 'putObjectTagging'); 'PUT', bucketName, '200', 'putObjectTagging',
);
const verCfg = bucket.getVersioningConfiguration(); const verCfg = bucket.getVersioningConfiguration();
additionalResHeaders['x-amz-version-id'] = additionalResHeaders['x-amz-version-id'] = getVersionIdResHeader(verCfg, objectMD);
getVersionIdResHeader(verCfg, objectMD);
} }
return callback(err, additionalResHeaders); return callback(err, additionalResHeaders);
}); });
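
The tail of objectPutTagging is a three-step sequence: stamp the metadata with the origin operation, persist it, then mirror the tag change to any external backend before the `x-amz-version-id` response header is derived from the bucket's versioning configuration. A compressed sketch of that ordering, with the collaborators passed in rather than assumed:

// Hedged sketch of the persist-then-replicate ordering shown above; the
// `metadata` and `data` wrappers are the ones required in this file.
function putTagsAndReplicate(metadata, data, bucket, objectKey, objectMD, tags, params, log, next) {
    /* eslint-disable no-param-reassign */
    objectMD.tags = tags;                       // assumed: the tag assignment sits outside this hunk
    objectMD.originOp = 's3:ObjectTagging:Put'; // lets downstream consumers see a tagging event
    /* eslint-enable no-param-reassign */
    return metadata.putObjectMD(bucket.getName(), objectKey, objectMD, params, log, err => {
        if (err) {
            return next(err, bucket, objectMD);
        }
        // Mirror the change to an external backend (e.g. an AWS location), if any.
        return data.objectTagging('Put', objectKey, bucket, objectMD, log,
            taggingErr => next(taggingErr, bucket, objectMD));
    });
}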

View File

@ -22,12 +22,13 @@ const monitoring = require('../utilities/metrics');
* @param {string} objectKey - object key from request (or as translated in * @param {string} objectKey - object key from request (or as translated in
* websiteGet) * websiteGet)
* @param {object} corsHeaders - CORS-related response headers * @param {object} corsHeaders - CORS-related response headers
* @param {object} request - normalized request object
* @param {object} log - Werelogs instance * @param {object} log - Werelogs instance
* @param {function} callback - callback to function in route * @param {function} callback - callback to function in route
* @return {undefined} * @return {undefined}
*/ */
function _errorActions(err, errorDocument, routingRules, function _errorActions(err, errorDocument, routingRules,
bucket, objectKey, corsHeaders, log, callback) { bucket, objectKey, corsHeaders, request, log, callback) {
const bucketName = bucket.getName(); const bucketName = bucket.getName();
const errRoutingRule = findRoutingRule(routingRules, const errRoutingRule = findRoutingRule(routingRules,
objectKey, err.code); objectKey, err.code);
@ -51,8 +52,8 @@ function _errorActions(err, errorDocument, routingRules,
} }
// return the default error message if the object is private // return the default error message if the object is private
// rather than sending a stored error file // rather than sending a stored error file
if (!isObjAuthorized(bucket, errObjMD, 'objectGet', if (!isObjAuthorized(bucket, errObjMD, request.apiMethods || 'objectGet',
constants.publicId, null, log)) { constants.publicId, null, request.iamAuthzResults, log)) {
log.trace('errorObj not authorized', { error: err }); log.trace('errorObj not authorized', { error: err });
monitoring.promMetrics( monitoring.promMetrics(
'GET', bucketName, err.code, 'getObject'); 'GET', bucketName, err.code, 'getObject');
@ -162,8 +163,8 @@ function websiteGet(request, log, callback) {
monitoring.promMetrics( monitoring.promMetrics(
'GET', bucketName, err.code, 'getObject'); 'GET', bucketName, err.code, 'getObject');
let returnErr = err; let returnErr = err;
const bucketAuthorized = isBucketAuthorized(bucket, const bucketAuthorized = isBucketAuthorized(bucket, request.apiMethods || 'bucketGet',
'bucketGet', constants.publicId, null, log, request); constants.publicId, null, request.iamAuthzResults, log, request);
// if index object does not exist and bucket is private AWS // if index object does not exist and bucket is private AWS
// returns 403 - AccessDenied error. // returns 403 - AccessDenied error.
if (err.is.NoSuchKey && !bucketAuthorized) { if (err.is.NoSuchKey && !bucketAuthorized) {
@ -171,16 +172,16 @@ function websiteGet(request, log, callback) {
} }
return _errorActions(returnErr, return _errorActions(returnErr,
websiteConfig.getErrorDocument(), routingRules, websiteConfig.getErrorDocument(), routingRules,
bucket, reqObjectKey, corsHeaders, log, bucket, reqObjectKey, corsHeaders, request, log,
callback); callback);
} }
if (!isObjAuthorized(bucket, objMD, 'objectGet', if (!isObjAuthorized(bucket, objMD, request.apiMethods || 'objectGet',
constants.publicId, null, log, request)) { constants.publicId, null, request.iamAuthzResults, log, request)) {
const err = errors.AccessDenied; const err = errors.AccessDenied;
log.trace('request not authorized', { error: err }); log.trace('request not authorized', { error: err });
return _errorActions(err, websiteConfig.getErrorDocument(), return _errorActions(err, websiteConfig.getErrorDocument(),
routingRules, bucket, routingRules, bucket,
reqObjectKey, corsHeaders, log, callback); reqObjectKey, corsHeaders, request, log, callback);
} }
const headerValResult = validateHeaders(request.headers, const headerValResult = validateHeaders(request.headers,
@ -190,7 +191,7 @@ function websiteGet(request, log, callback) {
log.trace('header validation error', { error: err }); log.trace('header validation error', { error: err });
return _errorActions(err, websiteConfig.getErrorDocument(), return _errorActions(err, websiteConfig.getErrorDocument(),
routingRules, bucket, reqObjectKey, routingRules, bucket, reqObjectKey,
corsHeaders, log, callback); corsHeaders, request, log, callback);
} }
// check if object to serve has website redirect header // check if object to serve has website redirect header
// Note: AWS prioritizes website configuration rules over // Note: AWS prioritizes website configuration rules over

View File

@ -3,21 +3,19 @@ const async = require('async');
const { parseString } = require('xml2js'); const { parseString } = require('xml2js');
const AWS = require('aws-sdk'); const AWS = require('aws-sdk');
const { cleanup, DummyRequestLogger, makeAuthInfo } const { metadata } = require('arsenal').storage.metadata.inMemory.metadata;
= require('../unit/helpers'); const { cleanup, DummyRequestLogger, makeAuthInfo } = require('../unit/helpers');
const { ds } = require('arsenal').storage.data.inMemory.datastore; const { ds } = require('arsenal').storage.data.inMemory.datastore;
const { bucketPut } = require('../../lib/api/bucketPut'); const { bucketPut } = require('../../lib/api/bucketPut');
const initiateMultipartUpload const initiateMultipartUpload = require('../../lib/api/initiateMultipartUpload');
= require('../../lib/api/initiateMultipartUpload');
const objectPut = require('../../lib/api/objectPut'); const objectPut = require('../../lib/api/objectPut');
const objectPutCopyPart = require('../../lib/api/objectPutCopyPart'); const objectPutCopyPart = require('../../lib/api/objectPutCopyPart');
const DummyRequest = require('../unit/DummyRequest'); const DummyRequest = require('../unit/DummyRequest');
const { metadata } = require('arsenal').storage.metadata.inMemory.metadata;
const constants = require('../../constants'); const constants = require('../../constants');
const s3 = new AWS.S3(); const s3 = new AWS.S3();
const splitter = constants.splitter; const { splitter } = constants;
const log = new DummyRequestLogger(); const log = new DummyRequestLogger();
const canonicalID = 'accessKey1'; const canonicalID = 'accessKey1';
const authInfo = makeAuthInfo(canonicalID); const authInfo = makeAuthInfo(canonicalID);
@ -56,14 +54,14 @@ function getAwsParamsBucketMismatch(destObjName, uploadId) {
} }
function copyPutPart(bucketLoc, mpuLoc, srcObjLoc, requestHost, cb, function copyPutPart(bucketLoc, mpuLoc, srcObjLoc, requestHost, cb,
errorPutCopyPart) { errorPutCopyPart) {
const keys = getSourceAndDestKeys(); const keys = getSourceAndDestKeys();
const { sourceObjName, destObjName } = keys; const { sourceObjName, destObjName } = keys;
const post = bucketLoc ? '<?xml version="1.0" encoding="UTF-8"?>' + const post = bucketLoc ? '<?xml version="1.0" encoding="UTF-8"?>'
'<CreateBucketConfiguration ' + + '<CreateBucketConfiguration '
'xmlns="http://s3.amazonaws.com/doc/2006-03-01/">' + + 'xmlns="http://s3.amazonaws.com/doc/2006-03-01/">'
`<LocationConstraint>${bucketLoc}</LocationConstraint>` + + `<LocationConstraint>${bucketLoc}</LocationConstraint>`
'</CreateBucketConfiguration>' : ''; + '</CreateBucketConfiguration>' : '';
const bucketPutReq = new DummyRequest({ const bucketPutReq = new DummyRequest({
bucketName, bucketName,
namespace, namespace,
@ -80,10 +78,13 @@ errorPutCopyPart) {
objectKey: destObjName, objectKey: destObjName,
headers: { host: `${bucketName}.s3.amazonaws.com` }, headers: { host: `${bucketName}.s3.amazonaws.com` },
url: `/${destObjName}?uploads`, url: `/${destObjName}?uploads`,
iamAuthzResults: false,
}; };
if (mpuLoc) { if (mpuLoc) {
initiateReq.headers = { 'host': `${bucketName}.s3.amazonaws.com`, initiateReq.headers = {
'x-amz-meta-scal-location-constraint': `${mpuLoc}` }; 'host': `${bucketName}.s3.amazonaws.com`,
'x-amz-meta-scal-location-constraint': `${mpuLoc}`,
};
} }
if (requestHost) { if (requestHost) {
initiateReq.parsedHost = requestHost; initiateReq.parsedHost = requestHost;
@ -94,10 +95,13 @@ errorPutCopyPart) {
objectKey: sourceObjName, objectKey: sourceObjName,
headers: { host: `${bucketName}.s3.amazonaws.com` }, headers: { host: `${bucketName}.s3.amazonaws.com` },
url: '/', url: '/',
iamAuthzResults: false,
}; };
if (srcObjLoc) { if (srcObjLoc) {
sourceObjPutParams.headers = { 'host': `${bucketName}.s3.amazonaws.com`, sourceObjPutParams.headers = {
'x-amz-meta-scal-location-constraint': `${srcObjLoc}` }; 'host': `${bucketName}.s3.amazonaws.com`,
'x-amz-meta-scal-location-constraint': `${srcObjLoc}`,
};
} }
const sourceObjPutReq = new DummyRequest(sourceObjPutParams, body); const sourceObjPutReq = new DummyRequest(sourceObjPutParams, body);
if (requestHost) { if (requestHost) {
@ -112,8 +116,7 @@ errorPutCopyPart) {
}); });
}, },
next => { next => {
objectPut(authInfo, sourceObjPutReq, undefined, log, err => objectPut(authInfo, sourceObjPutReq, undefined, log, err => next(err));
next(err));
}, },
next => { next => {
initiateMultipartUpload(authInfo, initiateReq, log, next); initiateMultipartUpload(authInfo, initiateReq, log, next);
@ -130,8 +133,8 @@ errorPutCopyPart) {
// Need to build request in here since do not have // Need to build request in here since do not have
// uploadId until here // uploadId until here
assert.ifError(err, 'Error putting source object or initiate MPU'); assert.ifError(err, 'Error putting source object or initiate MPU');
const testUploadId = json.InitiateMultipartUploadResult. const testUploadId = json.InitiateMultipartUploadResult
UploadId[0]; .UploadId[0];
const copyPartParams = { const copyPartParams = {
bucketName, bucketName,
namespace, namespace,
@ -172,137 +175,137 @@ function assertPartList(partList, uploadId) {
} }
describeSkipIfE2E('ObjectCopyPutPart API with multiple backends', describeSkipIfE2E('ObjectCopyPutPart API with multiple backends',
function testSuite() { function testSuite() {
this.timeout(60000); this.timeout(60000);
beforeEach(() => { beforeEach(() => {
cleanup(); cleanup();
}); });
it('should copy part to mem based on mpu location', done => { it('should copy part to mem based on mpu location', done => {
copyPutPart(fileLocation, memLocation, null, 'localhost', () => { copyPutPart(fileLocation, memLocation, null, 'localhost', () => {
// object info is stored in ds beginning at index one, // object info is stored in ds beginning at index one,
// so an array length of two means only one object // so an array length of two means only one object
// was stored in mem // was stored in mem
assert.strictEqual(ds.length, 2); assert.strictEqual(ds.length, 2);
assert.deepStrictEqual(ds[1].value, body); assert.deepStrictEqual(ds[1].value, body);
done(); done();
});
});
it('should copy part to file based on mpu location', done => {
copyPutPart(memLocation, fileLocation, null, 'localhost', () => {
assert.strictEqual(ds.length, 2);
done();
});
});
it('should copy part to AWS based on mpu location', done => {
copyPutPart(memLocation, awsLocation, null, 'localhost',
(keys, uploadId) => {
assert.strictEqual(ds.length, 2);
const awsReq = getAwsParams(keys.destObjName, uploadId);
s3.listParts(awsReq, (err, partList) => {
assertPartList(partList, uploadId);
s3.abortMultipartUpload(awsReq, err => {
assert.equal(err, null, `Error aborting MPU: ${err}. ` +
`You must abort MPU with upload ID ${uploadId} manually.`);
done();
});
}); });
}); });
});
it('should copy part to mem from AWS based on mpu location', done => { it('should copy part to file based on mpu location', done => {
copyPutPart(awsLocation, memLocation, null, 'localhost', () => { copyPutPart(memLocation, fileLocation, null, 'localhost', () => {
assert.strictEqual(ds.length, 2); assert.strictEqual(ds.length, 2);
assert.deepStrictEqual(ds[1].value, body); done();
done(); });
}); });
});
it('should copy part to mem based on bucket location', done => { it('should copy part to AWS based on mpu location', done => {
copyPutPart(memLocation, null, null, 'localhost', () => { copyPutPart(memLocation, awsLocation, null, 'localhost',
(keys, uploadId) => {
assert.strictEqual(ds.length, 2);
const awsReq = getAwsParams(keys.destObjName, uploadId);
s3.listParts(awsReq, (err, partList) => {
assertPartList(partList, uploadId);
s3.abortMultipartUpload(awsReq, err => {
assert.equal(err, null, `Error aborting MPU: ${err}. `
+ `You must abort MPU with upload ID ${uploadId} manually.`);
done();
});
});
});
});
it('should copy part to mem from AWS based on mpu location', done => {
copyPutPart(awsLocation, memLocation, null, 'localhost', () => {
assert.strictEqual(ds.length, 2);
assert.deepStrictEqual(ds[1].value, body);
done();
});
});
it('should copy part to mem based on bucket location', done => {
copyPutPart(memLocation, null, null, 'localhost', () => {
// ds length should be three because both source // ds length should be three because both source
// and copied objects should be in mem // and copied objects should be in mem
assert.strictEqual(ds.length, 3); assert.strictEqual(ds.length, 3);
assert.deepStrictEqual(ds[2].value, body); assert.deepStrictEqual(ds[2].value, body);
done(); done();
});
}); });
});
it('should copy part to file based on bucket location', done => { it('should copy part to file based on bucket location', done => {
copyPutPart(fileLocation, null, null, 'localhost', () => { copyPutPart(fileLocation, null, null, 'localhost', () => {
// ds should be empty because both source and // ds should be empty because both source and
// coped objects should be in file // coped objects should be in file
assert.deepStrictEqual(ds, []); assert.deepStrictEqual(ds, []);
done(); done();
});
}); });
});
it('should copy part to AWS based on bucket location', done => { it('should copy part to AWS based on bucket location', done => {
copyPutPart(awsLocation, null, null, 'localhost', (keys, uploadId) => { copyPutPart(awsLocation, null, null, 'localhost', (keys, uploadId) => {
assert.deepStrictEqual(ds, []); assert.deepStrictEqual(ds, []);
const awsReq = getAwsParams(keys.destObjName, uploadId); const awsReq = getAwsParams(keys.destObjName, uploadId);
s3.listParts(awsReq, (err, partList) => { s3.listParts(awsReq, (err, partList) => {
assertPartList(partList, uploadId); assertPartList(partList, uploadId);
s3.abortMultipartUpload(awsReq, err => { s3.abortMultipartUpload(awsReq, err => {
assert.equal(err, null, `Error aborting MPU: ${err}. ` + assert.equal(err, null, `Error aborting MPU: ${err}. `
`You must abort MPU with upload ID ${uploadId} manually.`); + `You must abort MPU with upload ID ${uploadId} manually.`);
done(); done();
});
}); });
}); });
}); });
});
it('should copy part an object on AWS location that has bucketMatch ' + it('should copy part an object on AWS location that has bucketMatch '
'equals false to a mpu with a different AWS location', done => { + 'equals false to a mpu with a different AWS location', done => {
copyPutPart(null, awsLocation, awsLocationMismatch, 'localhost', copyPutPart(null, awsLocation, awsLocationMismatch, 'localhost',
(keys, uploadId) => { (keys, uploadId) => {
assert.deepStrictEqual(ds, []); assert.deepStrictEqual(ds, []);
const awsReq = getAwsParams(keys.destObjName, uploadId); const awsReq = getAwsParams(keys.destObjName, uploadId);
s3.listParts(awsReq, (err, partList) => { s3.listParts(awsReq, (err, partList) => {
assertPartList(partList, uploadId); assertPartList(partList, uploadId);
s3.abortMultipartUpload(awsReq, err => { s3.abortMultipartUpload(awsReq, err => {
assert.equal(err, null, `Error aborting MPU: ${err}. ` + assert.equal(err, null, `Error aborting MPU: ${err}. `
`You must abort MPU with upload ID ${uploadId} manually.`); + `You must abort MPU with upload ID ${uploadId} manually.`);
done(); done();
});
});
}); });
});
it('should copy part an object on AWS to a mpu with a different '
+ 'AWS location that has bucketMatch equals false', done => {
copyPutPart(null, awsLocationMismatch, awsLocation, 'localhost',
(keys, uploadId) => {
assert.deepStrictEqual(ds, []);
const awsReq = getAwsParamsBucketMismatch(keys.destObjName,
uploadId);
s3.listParts(awsReq, (err, partList) => {
assertPartList(partList, uploadId);
s3.abortMultipartUpload(awsReq, err => {
assert.equal(err, null, `Error aborting MPU: ${err}. `
+ `You must abort MPU with upload ID ${uploadId} manually.`);
done();
});
});
});
});
it('should return error 403 AccessDenied copying part to a '
+ 'different AWS location without object READ access',
done => {
const errorPutCopyPart = { code: 'AccessDenied', statusCode: 403 };
copyPutPart(null, awsLocation, awsLocation2, 'localhost', done,
errorPutCopyPart);
});
it('should copy part to file based on request endpoint', done => {
copyPutPart(null, null, memLocation, 'localhost', () => {
assert.strictEqual(ds.length, 2);
done();
}); });
}); });
}); });
it('should copy part an object on AWS to a mpu with a different ' +
'AWS location that has bucketMatch equals false', done => {
copyPutPart(null, awsLocationMismatch, awsLocation, 'localhost',
(keys, uploadId) => {
assert.deepStrictEqual(ds, []);
const awsReq = getAwsParamsBucketMismatch(keys.destObjName,
uploadId);
s3.listParts(awsReq, (err, partList) => {
assertPartList(partList, uploadId);
s3.abortMultipartUpload(awsReq, err => {
assert.equal(err, null, `Error aborting MPU: ${err}. ` +
`You must abort MPU with upload ID ${uploadId} manually.`);
done();
});
});
});
});
it('should return error 403 AccessDenied copying part to a ' +
'different AWS location without object READ access',
done => {
const errorPutCopyPart = { code: 'AccessDenied', statusCode: 403 };
copyPutPart(null, awsLocation, awsLocation2, 'localhost', done,
errorPutCopyPart);
});
it('should copy part to file based on request endpoint', done => {
copyPutPart(null, null, memLocation, 'localhost', () => {
assert.strictEqual(ds.length, 2);
done();
});
});
});
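The helper these tests rely on notes that the copy-part request can only be built once the uploadId is known ("Need to build request in here since do not have uploadId until here"). A minimal sketch of that ordering with async.waterfall, using stand-in steps rather than the real bucketPut, objectPut, initiateMultipartUpload and objectPutCopyPart handlers:

const async = require('async');

// Stand-in steps: values are placeholders, not real API responses.
async.waterfall([
    next => next(null, 'source object stored'),
    // the initiate step returns the uploadId every later step depends on
    (sourceState, next) => next(null, { uploadId: 'example-upload-id' }),
    // only now can the copy-part request be built, because its URL and
    // query string both embed the uploadId from the previous step
    (initResult, next) => {
        const copyPartReq = {
            url: `/destKey?partNumber=1&uploadId=${initResult.uploadId}`,
            query: { partNumber: '1', uploadId: initResult.uploadId },
        };
        next(null, copyPartReq);
    },
], (err, copyPartReq) => {
    if (err) {
        console.error(err);
    } else {
        console.log('copy-part request built:', copyPartReq.url);
    }
});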

View File

@ -3,20 +3,17 @@ const async = require('async');
const crypto = require('crypto'); const crypto = require('crypto');
const { parseString } = require('xml2js'); const { parseString } = require('xml2js');
const AWS = require('aws-sdk'); const AWS = require('aws-sdk');
const { metadata } = require('arsenal').storage.metadata.inMemory.metadata;
const { config } = require('../../lib/Config'); const { config } = require('../../lib/Config');
const { cleanup, DummyRequestLogger, makeAuthInfo } const { cleanup, DummyRequestLogger, makeAuthInfo } = require('../unit/helpers');
= require('../unit/helpers');
const { ds } = require('arsenal').storage.data.inMemory.datastore; const { ds } = require('arsenal').storage.data.inMemory.datastore;
const { bucketPut } = require('../../lib/api/bucketPut'); const { bucketPut } = require('../../lib/api/bucketPut');
const initiateMultipartUpload const initiateMultipartUpload = require('../../lib/api/initiateMultipartUpload');
= require('../../lib/api/initiateMultipartUpload');
const objectPutPart = require('../../lib/api/objectPutPart'); const objectPutPart = require('../../lib/api/objectPutPart');
const DummyRequest = require('../unit/DummyRequest'); const DummyRequest = require('../unit/DummyRequest');
const { metadata } = require('arsenal').storage.metadata.inMemory.metadata;
const mdWrapper = require('../../lib/metadata/wrapper'); const mdWrapper = require('../../lib/metadata/wrapper');
const constants = require('../../constants'); const constants = require('../../constants');
const { getRealAwsConfig } = const { getRealAwsConfig } = require('../functional/aws-node-sdk/test/support/awsConfig');
require('../functional/aws-node-sdk/test/support/awsConfig');
const memLocation = 'scality-internal-mem'; const memLocation = 'scality-internal-mem';
const fileLocation = 'scality-internal-file'; const fileLocation = 'scality-internal-file';
@ -25,7 +22,7 @@ const awsLocationMismatch = 'awsbackendmismatch';
const awsConfig = getRealAwsConfig(awsLocation); const awsConfig = getRealAwsConfig(awsLocation);
const s3 = new AWS.S3(awsConfig); const s3 = new AWS.S3(awsConfig);
const splitter = constants.splitter; const { splitter } = constants;
const log = new DummyRequestLogger(); const log = new DummyRequestLogger();
const canonicalID = 'accessKey1'; const canonicalID = 'accessKey1';
const authInfo = makeAuthInfo(canonicalID); const authInfo = makeAuthInfo(canonicalID);
@ -47,13 +44,13 @@ function _getOverviewKey(objectKey, uploadId) {
} }
function putPart(bucketLoc, mpuLoc, requestHost, cb, function putPart(bucketLoc, mpuLoc, requestHost, cb,
errorDescription) { errorDescription) {
const objectName = `objectName-${Date.now()}`; const objectName = `objectName-${Date.now()}`;
const post = bucketLoc ? '<?xml version="1.0" encoding="UTF-8"?>' + const post = bucketLoc ? '<?xml version="1.0" encoding="UTF-8"?>'
'<CreateBucketConfiguration ' + + '<CreateBucketConfiguration '
'xmlns="http://s3.amazonaws.com/doc/2006-03-01/">' + + 'xmlns="http://s3.amazonaws.com/doc/2006-03-01/">'
`<LocationConstraint>${bucketLoc}</LocationConstraint>` + + `<LocationConstraint>${bucketLoc}</LocationConstraint>`
'</CreateBucketConfiguration>' : ''; + '</CreateBucketConfiguration>' : '';
const bucketPutReq = { const bucketPutReq = {
bucketName, bucketName,
namespace, namespace,
@ -70,10 +67,13 @@ errorDescription) {
objectKey: objectName, objectKey: objectName,
headers: { host: `${bucketName}.s3.amazonaws.com` }, headers: { host: `${bucketName}.s3.amazonaws.com` },
url: `/${objectName}?uploads`, url: `/${objectName}?uploads`,
iamAuthzResults: false,
}; };
if (mpuLoc) { if (mpuLoc) {
initiateReq.headers = { 'host': `${bucketName}.s3.amazonaws.com`, initiateReq.headers = {
'x-amz-meta-scal-location-constraint': `${mpuLoc}` }; 'host': `${bucketName}.s3.amazonaws.com`,
'x-amz-meta-scal-location-constraint': `${mpuLoc}`,
};
} }
if (requestHost) { if (requestHost) {
initiateReq.parsedHost = requestHost; initiateReq.parsedHost = requestHost;
@ -123,9 +123,9 @@ errorDescription) {
const partReq = new DummyRequest(partReqParams, body1); const partReq = new DummyRequest(partReqParams, body1);
return objectPutPart(authInfo, partReq, undefined, log, err => { return objectPutPart(authInfo, partReq, undefined, log, err => {
assert.strictEqual(err, null); assert.strictEqual(err, null);
if (bucketLoc !== awsLocation && mpuLoc !== awsLocation && if (bucketLoc !== awsLocation && mpuLoc !== awsLocation
bucketLoc !== awsLocationMismatch && && bucketLoc !== awsLocationMismatch
mpuLoc !== awsLocationMismatch) { && mpuLoc !== awsLocationMismatch) {
const keysInMPUkeyMap = []; const keysInMPUkeyMap = [];
metadata.keyMaps.get(mpuBucket).forEach((val, key) => { metadata.keyMaps.get(mpuBucket).forEach((val, key) => {
keysInMPUkeyMap.push(key); keysInMPUkeyMap.push(key);
@ -138,7 +138,7 @@ errorDescription) {
}); });
const partKey = sortedKeyMap[1]; const partKey = sortedKeyMap[1];
const partETag = metadata.keyMaps.get(mpuBucket) const partETag = metadata.keyMaps.get(mpuBucket)
.get(partKey)['content-md5']; .get(partKey)['content-md5'];
assert.strictEqual(keysInMPUkeyMap.length, 2); assert.strictEqual(keysInMPUkeyMap.length, 2);
assert.strictEqual(partETag, calculatedHash1); assert.strictEqual(partETag, calculatedHash1);
} }
@ -148,8 +148,8 @@ errorDescription) {
} }
function listAndAbort(uploadId, calculatedHash2, objectName, done) { function listAndAbort(uploadId, calculatedHash2, objectName, done) {
const awsBucket = config.locationConstraints[awsLocation]. const awsBucket = config.locationConstraints[awsLocation]
details.bucketName; .details.bucketName;
const params = { const params = {
Bucket: awsBucket, Bucket: awsBucket,
Key: objectName, Key: objectName,
@ -162,167 +162,169 @@ function listAndAbort(uploadId, calculatedHash2, objectName, done) {
assert.strictEqual(`"${calculatedHash2}"`, data.Parts[0].ETag); assert.strictEqual(`"${calculatedHash2}"`, data.Parts[0].ETag);
} }
s3.abortMultipartUpload(params, err => { s3.abortMultipartUpload(params, err => {
assert.equal(err, null, `Error aborting MPU: ${err}. ` + assert.equal(err, null, `Error aborting MPU: ${err}. `
`You must abort MPU with upload ID ${uploadId} manually.`); + `You must abort MPU with upload ID ${uploadId} manually.`);
done(); done();
}); });
}); });
} }
describeSkipIfE2E('objectPutPart API with multiple backends', describeSkipIfE2E('objectPutPart API with multiple backends',
function testSuite() { function testSuite() {
this.timeout(5000); this.timeout(5000);
beforeEach(() => { beforeEach(() => {
cleanup(); cleanup();
}); });
it('should upload a part to file based on mpu location', done => { it('should upload a part to file based on mpu location', done => {
putPart(memLocation, fileLocation, 'localhost', () => { putPart(memLocation, fileLocation, 'localhost', () => {
// if ds is empty, the object is not in mem, which means it // if ds is empty, the object is not in mem, which means it
// must be in file because those are the only possibilities // must be in file because those are the only possibilities
// for unit tests // for unit tests
assert.deepStrictEqual(ds, []); assert.deepStrictEqual(ds, []);
done(); done();
});
});
it('should put a part to mem based on mpu location', done => {
putPart(fileLocation, memLocation, 'localhost', () => {
assert.deepStrictEqual(ds[1].value, body1);
done();
});
});
it('should put a part to AWS based on mpu location', done => {
putPart(fileLocation, awsLocation, 'localhost',
(objectName, uploadId) => {
assert.deepStrictEqual(ds, []);
listAndAbort(uploadId, null, objectName, done);
});
});
it('should replace part if two parts uploaded with same part number to AWS',
done => {
putPart(fileLocation, awsLocation, 'localhost',
(objectName, uploadId) => {
assert.deepStrictEqual(ds, []);
const partReqParams = {
bucketName,
namespace,
objectKey: objectName,
headers: { 'host': `${bucketName}.s3.amazonaws.com`,
'x-amz-meta-scal-location-constraint': awsLocation },
url: `/${objectName}?partNumber=1&uploadId=${uploadId}`,
query: {
partNumber: '1', uploadId,
},
};
const partReq = new DummyRequest(partReqParams, body2);
objectPutPart(authInfo, partReq, undefined, log, err => {
assert.equal(err, null, `Error putting second part: ${err}`);
listAndAbort(uploadId, calculatedHash2, objectName, done);
}); });
}); });
});
it('should upload part based on mpu location even if part ' + it('should put a part to mem based on mpu location', done => {
'location constraint is specified ', done => { putPart(fileLocation, memLocation, 'localhost', () => {
putPart(fileLocation, memLocation, 'localhost', () => { assert.deepStrictEqual(ds[1].value, body1);
assert.deepStrictEqual(ds[1].value, body1); done();
done(); });
}); });
});
it('should put a part to file based on bucket location', done => { it('should put a part to AWS based on mpu location', done => {
putPart(fileLocation, null, 'localhost', () => { putPart(fileLocation, awsLocation, 'localhost',
assert.deepStrictEqual(ds, []); (objectName, uploadId) => {
done(); assert.deepStrictEqual(ds, []);
}); listAndAbort(uploadId, null, objectName, done);
});
it('should put a part to mem based on bucket location', done => {
putPart(memLocation, null, 'localhost', () => {
assert.deepStrictEqual(ds[1].value, body1);
done();
});
});
it('should put a part to AWS based on bucket location', done => {
putPart(awsLocation, null, 'localhost',
(objectName, uploadId) => {
assert.deepStrictEqual(ds, []);
listAndAbort(uploadId, null, objectName, done);
});
});
it('should put a part to AWS based on bucket location with bucketMatch ' +
'set to true', done => {
putPart(null, awsLocation, 'localhost',
(objectName, uploadId) => {
assert.deepStrictEqual(ds, []);
listAndAbort(uploadId, null, objectName, done);
});
});
it('should put a part to AWS based on bucket location with bucketMatch ' +
'set to false', done => {
putPart(null, awsLocationMismatch, 'localhost',
(objectName, uploadId) => {
assert.deepStrictEqual(ds, []);
listAndAbort(uploadId, null, `${bucketName}/${objectName}`, done);
});
});
it('should put a part to file based on request endpoint', done => {
putPart(null, null, 'localhost', () => {
assert.deepStrictEqual(ds, []);
done();
});
});
it('should store a part even if the MPU was initiated on legacy version',
done => {
putPart('scality-internal-mem', null, 'localhost',
(objectKey, uploadId) => {
const mputOverviewKey = _getOverviewKey(objectKey, uploadId);
mdWrapper.getObjectMD(mpuBucket, mputOverviewKey, {}, log,
(err, res) => {
// remove location constraint to mimic legacy behavior
// eslint-disable-next-line no-param-reassign
res.controllingLocationConstraint = undefined;
const md5Hash = crypto.createHash('md5');
const bufferBody = Buffer.from(body1);
const calculatedHash = md5Hash.update(bufferBody).digest('hex');
const partRequest = new DummyRequest({
bucketName,
namespace,
objectKey,
headers: { host: `${bucketName}.s3.amazonaws.com` },
url: `/${objectKey}?partNumber=1&uploadId=${uploadId}`,
query: { partNumber: '1', uploadId },
calculatedHash,
}, body1);
objectPutPart(authInfo, partRequest, undefined, log, err => {
assert.strictEqual(err, null);
const keysInMPUkeyMap = [];
metadata.keyMaps.get(mpuBucket).forEach((val, key) => {
keysInMPUkeyMap.push(key);
});
const sortedKeyMap = keysInMPUkeyMap.sort(a => {
if (a.slice(0, 8) === 'overview') {
return -1;
}
return 0;
});
const partKey = sortedKeyMap[1];
const partETag = metadata.keyMaps.get(mpuBucket)
.get(partKey)['content-md5'];
assert.strictEqual(keysInMPUkeyMap.length, 2);
assert.strictEqual(partETag, calculatedHash);
done();
}); });
});
it('should replace part if two parts uploaded with same part number to AWS',
done => {
putPart(fileLocation, awsLocation, 'localhost',
(objectName, uploadId) => {
assert.deepStrictEqual(ds, []);
const partReqParams = {
bucketName,
namespace,
objectKey: objectName,
headers: {
'host': `${bucketName}.s3.amazonaws.com`,
'x-amz-meta-scal-location-constraint': awsLocation,
},
url: `/${objectName}?partNumber=1&uploadId=${uploadId}`,
query: {
partNumber: '1', uploadId,
},
};
const partReq = new DummyRequest(partReqParams, body2);
objectPutPart(authInfo, partReq, undefined, log, err => {
assert.equal(err, null, `Error putting second part: ${err}`);
listAndAbort(uploadId, calculatedHash2, objectName, done);
});
});
});
it('should upload part based on mpu location even if part '
+ 'location constraint is specified ', done => {
putPart(fileLocation, memLocation, 'localhost', () => {
assert.deepStrictEqual(ds[1].value, body1);
done();
}); });
}); });
it('should put a part to file based on bucket location', done => {
putPart(fileLocation, null, 'localhost', () => {
assert.deepStrictEqual(ds, []);
done();
});
});
it('should put a part to mem based on bucket location', done => {
putPart(memLocation, null, 'localhost', () => {
assert.deepStrictEqual(ds[1].value, body1);
done();
});
});
it('should put a part to AWS based on bucket location', done => {
putPart(awsLocation, null, 'localhost',
(objectName, uploadId) => {
assert.deepStrictEqual(ds, []);
listAndAbort(uploadId, null, objectName, done);
});
});
it('should put a part to AWS based on bucket location with bucketMatch '
+ 'set to true', done => {
putPart(null, awsLocation, 'localhost',
(objectName, uploadId) => {
assert.deepStrictEqual(ds, []);
listAndAbort(uploadId, null, objectName, done);
});
});
it('should put a part to AWS based on bucket location with bucketMatch '
+ 'set to false', done => {
putPart(null, awsLocationMismatch, 'localhost',
(objectName, uploadId) => {
assert.deepStrictEqual(ds, []);
listAndAbort(uploadId, null, `${bucketName}/${objectName}`, done);
});
});
it('should put a part to file based on request endpoint', done => {
putPart(null, null, 'localhost', () => {
assert.deepStrictEqual(ds, []);
done();
});
});
it('should store a part even if the MPU was initiated on legacy version',
done => {
putPart('scality-internal-mem', null, 'localhost',
(objectKey, uploadId) => {
const mputOverviewKey = _getOverviewKey(objectKey, uploadId);
mdWrapper.getObjectMD(mpuBucket, mputOverviewKey, {}, log,
(err, res) => {
// remove location constraint to mimic legacy behavior
// eslint-disable-next-line no-param-reassign
res.controllingLocationConstraint = undefined;
const md5Hash = crypto.createHash('md5');
const bufferBody = Buffer.from(body1);
const calculatedHash = md5Hash.update(bufferBody).digest('hex');
const partRequest = new DummyRequest({
bucketName,
namespace,
objectKey,
headers: { host: `${bucketName}.s3.amazonaws.com` },
url: `/${objectKey}?partNumber=1&uploadId=${uploadId}`,
query: { partNumber: '1', uploadId },
calculatedHash,
}, body1);
objectPutPart(authInfo, partRequest, undefined, log, err => {
assert.strictEqual(err, null);
const keysInMPUkeyMap = [];
metadata.keyMaps.get(mpuBucket).forEach((val, key) => {
keysInMPUkeyMap.push(key);
});
const sortedKeyMap = keysInMPUkeyMap.sort(a => {
if (a.slice(0, 8) === 'overview') {
return -1;
}
return 0;
});
const partKey = sortedKeyMap[1];
const partETag = metadata.keyMaps.get(mpuBucket)
.get(partKey)['content-md5'];
assert.strictEqual(keysInMPUkeyMap.length, 2);
assert.strictEqual(partETag, calculatedHash);
done();
});
});
});
});
}); });
});
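The listAndAbort helper reformatted above follows a common cleanup pattern for tests that create real multipart uploads on an external AWS backend: list the parts to verify the upload, then always abort so no incomplete MPU (and its storage cost) is left behind. A standalone sketch of that pattern with the aws-sdk; bucket, key, uploadId and credentials are placeholders:

const AWS = require('aws-sdk');

// Placeholders: supply real credentials, bucket, key and uploadId.
const s3 = new AWS.S3({ region: 'us-east-1' });

function listAndAbortSketch(params, done) {
    // 1. verify the uploaded part is visible on the backend
    s3.listParts(params, (listErr, data) => {
        if (listErr) {
            return done(listErr);
        }
        console.log('parts currently stored:', data.Parts.length);
        // 2. always abort so the test does not leak an incomplete MPU
        return s3.abortMultipartUpload(params, abortErr => done(abortErr));
    });
}

listAndAbortSketch(
    { Bucket: 'example-bucket', Key: 'example-key', UploadId: 'example-upload-id' },
    err => console.log(err ? `cleanup failed: ${err}` : 'MPU aborted'),
);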

View File

@ -63,6 +63,7 @@ const baseGetRequest = {
bucketName, bucketName,
namespace, namespace,
headers: { host: '/' }, headers: { host: '/' },
iamAuthzResults: false,
}; };
const baseUrl = `/${bucketName}`; const baseUrl = `/${bucketName}`;
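The iamAuthzResults: false flag added to this fixture recurs in every test request object touched below; if that repetition ever becomes a maintenance burden, it could be factored into a small builder. A sketch only, with a hypothetical helper name that is not part of this change set:

// Hypothetical helper: builds the minimal request shape these unit tests
// use, with the new iamAuthzResults flag defaulted off.
function makeTestRequest(bucketName, overrides = {}) {
    return Object.assign({
        bucketName,
        namespace: 'default', // placeholder namespace
        headers: { host: `${bucketName}.s3.amazonaws.com` },
        url: '/',
        iamAuthzResults: false,
    }, overrides);
}

const aclRequest = makeTestRequest('examplebucket', {
    url: '/?acl',
    query: { acl: '' },
});
console.log(aclRequest.iamAuthzResults); // false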

View File

@ -25,6 +25,7 @@ describe('bucketGetACL API', () => {
namespace, namespace,
headers: { host: `${bucketName}.s3.amazonaws.com` }, headers: { host: `${bucketName}.s3.amazonaws.com` },
url: '/', url: '/',
iamAuthzResults: false,
}; };
const testGetACLRequest = { const testGetACLRequest = {
bucketName, bucketName,
@ -32,6 +33,7 @@ describe('bucketGetACL API', () => {
headers: { host: `${bucketName}.s3.amazonaws.com` }, headers: { host: `${bucketName}.s3.amazonaws.com` },
url: '/?acl', url: '/?acl',
query: { acl: '' }, query: { acl: '' },
iamAuthzResults: false,
}; };
it('should get a canned private ACL', done => { it('should get a canned private ACL', done => {
@ -44,6 +46,7 @@ describe('bucketGetACL API', () => {
}, },
url: '/?acl', url: '/?acl',
query: { acl: '' }, query: { acl: '' },
iamAuthzResults: false,
}; };
async.waterfall([ async.waterfall([
@ -76,6 +79,7 @@ describe('bucketGetACL API', () => {
}, },
url: '/?acl', url: '/?acl',
query: { acl: '' }, query: { acl: '' },
iamAuthzResults: false,
}; };
async.waterfall([ async.waterfall([
@ -119,6 +123,7 @@ describe('bucketGetACL API', () => {
}, },
url: '/?acl', url: '/?acl',
query: { acl: '' }, query: { acl: '' },
iamAuthzResults: false,
}; };
async.waterfall([ async.waterfall([
@ -156,6 +161,7 @@ describe('bucketGetACL API', () => {
}, },
url: '/?acl', url: '/?acl',
query: { acl: '' }, query: { acl: '' },
iamAuthzResults: false,
}; };
async.waterfall([ async.waterfall([
@ -194,6 +200,7 @@ describe('bucketGetACL API', () => {
}, },
url: '/?acl', url: '/?acl',
query: { acl: '' }, query: { acl: '' },
iamAuthzResults: false,
}; };
async.waterfall([ async.waterfall([
@ -248,6 +255,7 @@ describe('bucketGetACL API', () => {
}, },
url: '/?acl', url: '/?acl',
query: { acl: '' }, query: { acl: '' },
iamAuthzResults: false,
}; };
const canonicalIDforSample1 = const canonicalIDforSample1 =
'79a59df900b949e55d96a1e698fbacedfd6e09d98eacf8f8d5218e7cd47ef2be'; '79a59df900b949e55d96a1e698fbacedfd6e09d98eacf8f8d5218e7cd47ef2be';
@ -338,6 +346,7 @@ describe('bucketGetACL API', () => {
}, },
url: '/?acl', url: '/?acl',
query: { acl: '' }, query: { acl: '' },
iamAuthzResults: false,
}; };
async.waterfall([ async.waterfall([
@ -377,6 +386,7 @@ describe('bucketGetACL API', () => {
}, },
url: '/?acl', url: '/?acl',
query: { acl: '' }, query: { acl: '' },
iamAuthzResults: false,
}; };
async.waterfall([ async.waterfall([

View File

@ -16,6 +16,7 @@ const testBucketPutRequest = {
bucketName, bucketName,
headers: { host: `${bucketName}.s3.amazonaws.com` }, headers: { host: `${bucketName}.s3.amazonaws.com` },
url: '/', url: '/',
iamAuthzResults: false,
}; };
function _makeCorsRequest(xml) { function _makeCorsRequest(xml) {
@ -26,6 +27,7 @@ function _makeCorsRequest(xml) {
}, },
url: '/?cors', url: '/?cors',
query: { cors: '' }, query: { cors: '' },
iamAuthzResults: false,
}; };
if (xml) { if (xml) {

View File

@ -17,6 +17,7 @@ const testBucketPutRequest = {
bucketName, bucketName,
headers: { host: `${bucketName}.s3.amazonaws.com` }, headers: { host: `${bucketName}.s3.amazonaws.com` },
url: '/', url: '/',
iamAuthzResults: false,
}; };
describe('getBucketLifecycle API', () => { describe('getBucketLifecycle API', () => {

View File

@ -16,6 +16,7 @@ const testBucketPutRequest = {
bucketName, bucketName,
headers: { host: `${bucketName}.s3.amazonaws.com` }, headers: { host: `${bucketName}.s3.amazonaws.com` },
url: '/', url: '/',
iamAuthzResults: false,
}; };
const testGetLocationRequest = { const testGetLocationRequest = {
@ -25,6 +26,7 @@ const testGetLocationRequest = {
}, },
url: '/?location', url: '/?location',
query: { location: '' }, query: { location: '' },
iamAuthzResults: false,
}; };
const locationConstraints = config.locationConstraints; const locationConstraints = config.locationConstraints;

View File

@ -15,6 +15,7 @@ const testBucketPutRequest = {
bucketName, bucketName,
headers: { host: `${bucketName}.s3.amazonaws.com` }, headers: { host: `${bucketName}.s3.amazonaws.com` },
url: '/', url: '/',
iamAuthzResults: false,
}; };
function getNotificationRequest(bucketName, xml) { function getNotificationRequest(bucketName, xml) {
@ -23,6 +24,7 @@ function getNotificationRequest(bucketName, xml) {
headers: { headers: {
host: `${bucketName}.s3.amazonaws.com`, host: `${bucketName}.s3.amazonaws.com`,
}, },
iamAuthzResults: false,
}; };
if (xml) { if (xml) {
request.post = xml; request.post = xml;

View File

@ -14,6 +14,7 @@ const bucketPutReq = {
host: `${bucketName}.s3.amazonaws.com`, host: `${bucketName}.s3.amazonaws.com`,
}, },
url: '/', url: '/',
iamAuthzResults: false,
}; };
const testBucketPutReqWithObjLock = { const testBucketPutReqWithObjLock = {
@ -23,6 +24,7 @@ const testBucketPutReqWithObjLock = {
'x-amz-bucket-object-lock-enabled': 'True', 'x-amz-bucket-object-lock-enabled': 'True',
}, },
url: '/', url: '/',
iamAuthzResults: false,
}; };
function getObjectLockConfigRequest(bucketName, xml) { function getObjectLockConfigRequest(bucketName, xml) {
@ -33,6 +35,7 @@ function getObjectLockConfigRequest(bucketName, xml) {
'x-amz-bucket-object-lock-enabled': 'true', 'x-amz-bucket-object-lock-enabled': 'true',
}, },
url: '/?object-lock', url: '/?object-lock',
iamAuthzResults: false,
}; };
if (xml) { if (xml) {
request.post = xml; request.post = xml;

View File

@ -16,6 +16,7 @@ const testBasicRequest = {
bucketName, bucketName,
headers: { host: `${bucketName}.s3.amazonaws.com` }, headers: { host: `${bucketName}.s3.amazonaws.com` },
url: '/', url: '/',
iamAuthzResults: false,
}; };
const expectedBucketPolicy = { const expectedBucketPolicy = {
@ -34,6 +35,7 @@ const testPutPolicyRequest = {
bucketName, bucketName,
headers: { host: `${bucketName}.s3.amazonaws.com` }, headers: { host: `${bucketName}.s3.amazonaws.com` },
post: JSON.stringify(expectedBucketPolicy), post: JSON.stringify(expectedBucketPolicy),
iamAuthzResults: false,
}; };
describe('getBucketPolicy API', () => { describe('getBucketPolicy API', () => {

View File

@ -15,6 +15,7 @@ const testBucketPutRequest = {
bucketName, bucketName,
headers: { host: `${bucketName}.s3.amazonaws.com` }, headers: { host: `${bucketName}.s3.amazonaws.com` },
url: '/', url: '/',
iamAuthzResults: false,
}; };
function _makeWebsiteRequest(xml) { function _makeWebsiteRequest(xml) {
@ -25,6 +26,7 @@ function _makeWebsiteRequest(xml) {
}, },
url: '/?website', url: '/?website',
query: { website: '' }, query: { website: '' },
iamAuthzResults: false,
}; };
if (xml) { if (xml) {

View File

@ -18,11 +18,10 @@ const testBucketPutRequest = {
namespace, namespace,
headers: { host: `${bucketName}.s3.amazonaws.com` }, headers: { host: `${bucketName}.s3.amazonaws.com` },
url: '/', url: '/',
iamAuthzResults: false,
}; };
const canonicalIDforSample1 = const canonicalIDforSample1 = '79a59df900b949e55d96a1e698fbacedfd6e09d98eacf8f8d5218e7cd47ef2be';
'79a59df900b949e55d96a1e698fbacedfd6e09d98eacf8f8d5218e7cd47ef2be'; const canonicalIDforSample2 = '79a59df900b949e55d96a1e698fbacedfd6e09d98eacf8f8d5218e7cd47ef2bf';
const canonicalIDforSample2 =
'79a59df900b949e55d96a1e698fbacedfd6e09d98eacf8f8d5218e7cd47ef2bf';
const invalidIds = { const invalidIds = {
'too short': 'id="invalid_id"', 'too short': 'id="invalid_id"',
@ -42,11 +41,10 @@ describe('putBucketACL API', () => {
afterEach(() => cleanup()); afterEach(() => cleanup());
it('should parse a grantheader', () => { it('should parse a grantheader', () => {
const grantRead = const grantRead = `uri=${constants.logId}, `
`uri=${constants.logId}, ` + + 'emailAddress="test@testing.com", '
'emailAddress="test@testing.com", ' + + 'emailAddress="test2@testly.com", '
'emailAddress="test2@testly.com", ' + + 'id="sdfsdfsfwwiieohefs"';
'id="sdfsdfsfwwiieohefs"';
const grantReadHeader = aclUtils.parseGrant(grantRead, 'read'); const grantReadHeader = aclUtils.parseGrant(grantRead, 'read');
const firstIdentifier = grantReadHeader[0].identifier; const firstIdentifier = grantReadHeader[0].identifier;
assert.strictEqual(firstIdentifier, constants.logId); assert.strictEqual(firstIdentifier, constants.logId);
@ -58,7 +56,7 @@ describe('putBucketACL API', () => {
assert.strictEqual(fourthIdentifier, 'sdfsdfsfwwiieohefs'); assert.strictEqual(fourthIdentifier, 'sdfsdfsfwwiieohefs');
const fourthType = grantReadHeader[3].userIDType; const fourthType = grantReadHeader[3].userIDType;
assert.strictEqual(fourthType, 'id'); assert.strictEqual(fourthType, 'id');
const grantType = grantReadHeader[3].grantType; const { grantType } = grantReadHeader[3];
assert.strictEqual(grantType, 'read'); assert.strictEqual(grantType, 'read');
}); });
@ -72,6 +70,7 @@ describe('putBucketACL API', () => {
}, },
url: '/?acl', url: '/?acl',
query: { acl: '' }, query: { acl: '' },
iamAuthzResults: false,
}; };
bucketPutACL(authInfo, testACLRequest, log, err => { bucketPutACL(authInfo, testACLRequest, log, err => {
@ -90,6 +89,7 @@ describe('putBucketACL API', () => {
}, },
url: '/?acl', url: '/?acl',
query: { acl: '' }, query: { acl: '' },
iamAuthzResults: false,
}; };
bucketPutACL(authInfo, testACLRequest, log, err => { bucketPutACL(authInfo, testACLRequest, log, err => {
assert.strictEqual(err, undefined); assert.strictEqual(err, undefined);
@ -111,6 +111,7 @@ describe('putBucketACL API', () => {
}, },
url: '/?acl', url: '/?acl',
query: { acl: '' }, query: { acl: '' },
iamAuthzResults: false,
}; };
const testACLRequest2 = { const testACLRequest2 = {
bucketName, bucketName,
@ -121,6 +122,7 @@ describe('putBucketACL API', () => {
}, },
url: '/?acl', url: '/?acl',
query: { acl: '' }, query: { acl: '' },
iamAuthzResults: false,
}; };
bucketPutACL(authInfo, testACLRequest, log, err => { bucketPutACL(authInfo, testACLRequest, log, err => {
assert.strictEqual(err, undefined); assert.strictEqual(err, undefined);
@ -130,7 +132,7 @@ describe('putBucketACL API', () => {
assert.strictEqual(err, undefined); assert.strictEqual(err, undefined);
metadata.getBucket(bucketName, log, (err, md) => { metadata.getBucket(bucketName, log, (err, md) => {
assert.strictEqual(md.getAcl().Canned, assert.strictEqual(md.getAcl().Canned,
'authenticated-read'); 'authenticated-read');
done(); done();
}); });
}); });
@ -138,8 +140,8 @@ describe('putBucketACL API', () => {
}); });
}); });
it('should set a canned private ACL ' + it('should set a canned private ACL '
'followed by a log-delivery-write ACL', done => { + 'followed by a log-delivery-write ACL', done => {
const testACLRequest = { const testACLRequest = {
bucketName, bucketName,
namespace, namespace,
@ -149,6 +151,7 @@ describe('putBucketACL API', () => {
}, },
url: '/?acl', url: '/?acl',
query: { acl: '' }, query: { acl: '' },
iamAuthzResults: false,
}; };
const testACLRequest2 = { const testACLRequest2 = {
bucketName, bucketName,
@ -159,6 +162,7 @@ describe('putBucketACL API', () => {
}, },
url: '/?acl', url: '/?acl',
query: { acl: '' }, query: { acl: '' },
iamAuthzResults: false,
}; };
bucketPutACL(authInfo, testACLRequest, log, err => { bucketPutACL(authInfo, testACLRequest, log, err => {
@ -169,7 +173,7 @@ describe('putBucketACL API', () => {
assert.strictEqual(err, undefined); assert.strictEqual(err, undefined);
metadata.getBucket(bucketName, log, (err, md) => { metadata.getBucket(bucketName, log, (err, md) => {
assert.strictEqual(md.getAcl().Canned, assert.strictEqual(md.getAcl().Canned,
'log-delivery-write'); 'log-delivery-write');
done(); done();
}); });
}); });
@ -184,19 +188,20 @@ describe('putBucketACL API', () => {
headers: { headers: {
'host': `${bucketName}.s3.amazonaws.com`, 'host': `${bucketName}.s3.amazonaws.com`,
'x-amz-grant-full-control': 'x-amz-grant-full-control':
'emailaddress="sampleaccount1@sampling.com"' + 'emailaddress="sampleaccount1@sampling.com"'
',emailaddress="sampleaccount2@sampling.com"', + ',emailaddress="sampleaccount2@sampling.com"',
'x-amz-grant-read': `uri=${constants.logId}`, 'x-amz-grant-read': `uri=${constants.logId}`,
'x-amz-grant-write': `uri=${constants.publicId}`, 'x-amz-grant-write': `uri=${constants.publicId}`,
'x-amz-grant-read-acp': 'x-amz-grant-read-acp':
'id=79a59df900b949e55d96a1e698fbacedfd6e09d98eac' + 'id=79a59df900b949e55d96a1e698fbacedfd6e09d98eac'
'f8f8d5218e7cd47ef2be', + 'f8f8d5218e7cd47ef2be',
'x-amz-grant-write-acp': 'x-amz-grant-write-acp':
'id=79a59df900b949e55d96a1e698fbacedfd6e09d98eac' + 'id=79a59df900b949e55d96a1e698fbacedfd6e09d98eac'
'f8f8d5218e7cd47ef2bf', + 'f8f8d5218e7cd47ef2bf',
}, },
url: '/?acl', url: '/?acl',
query: { acl: '' }, query: { acl: '' },
iamAuthzResults: false,
}; };
bucketPutACL(authInfo, testACLRequest, log, err => { bucketPutACL(authInfo, testACLRequest, log, err => {
assert.strictEqual(err, undefined); assert.strictEqual(err, undefined);
@ -223,21 +228,22 @@ describe('putBucketACL API', () => {
headers: { headers: {
'host': `${bucketName}.s3.amazonaws.com`, 'host': `${bucketName}.s3.amazonaws.com`,
'x-amz-grant-full-control': 'x-amz-grant-full-control':
'emailaddress="sampleaccount1@sampling.com"' + 'emailaddress="sampleaccount1@sampling.com"'
',emailaddress="sampleaccount2@sampling.com"', + ',emailaddress="sampleaccount2@sampling.com"',
'x-amz-grant-read': 'x-amz-grant-read':
'emailaddress="sampleaccount1@sampling.com"', 'emailaddress="sampleaccount1@sampling.com"',
'x-amz-grant-write': 'x-amz-grant-write':
'emailaddress="sampleaccount1@sampling.com"', 'emailaddress="sampleaccount1@sampling.com"',
'x-amz-grant-read-acp': 'x-amz-grant-read-acp':
'id=79a59df900b949e55d96a1e698fbacedfd6e09d98eac' + 'id=79a59df900b949e55d96a1e698fbacedfd6e09d98eac'
'f8f8d5218e7cd47ef2be', + 'f8f8d5218e7cd47ef2be',
'x-amz-grant-write-acp': 'x-amz-grant-write-acp':
'id=79a59df900b949e55d96a1e698fbacedfd6e09d98eac' + 'id=79a59df900b949e55d96a1e698fbacedfd6e09d98eac'
'f8f8d5218e7cd47ef2bf', + 'f8f8d5218e7cd47ef2bf',
}, },
url: '/?acl', url: '/?acl',
query: { acl: '' }, query: { acl: '' },
iamAuthzResults: false,
}; };
bucketPutACL(authInfo, testACLRequest, log, err => { bucketPutACL(authInfo, testACLRequest, log, err => {
assert.strictEqual(err, undefined); assert.strictEqual(err, undefined);
@ -260,8 +266,8 @@ describe('putBucketACL API', () => {
}); });
Object.keys(invalidIds).forEach(idType => { Object.keys(invalidIds).forEach(idType => {
it('should return an error if grantee canonical ID provided in ACL ' + it('should return an error if grantee canonical ID provided in ACL '
`request invalid because ${idType}`, done => { + `request invalid because ${idType}`, done => {
const testACLRequest = { const testACLRequest = {
bucketName, bucketName,
namespace, namespace,
@ -271,6 +277,7 @@ describe('putBucketACL API', () => {
}, },
url: '/?acl', url: '/?acl',
query: { acl: '' }, query: { acl: '' },
iamAuthzResults: false,
}; };
return bucketPutACL(authInfo, testACLRequest, log, err => { return bucketPutACL(authInfo, testACLRequest, log, err => {
assert.deepStrictEqual(err, errors.InvalidArgument); assert.deepStrictEqual(err, errors.InvalidArgument);
@ -279,19 +286,20 @@ describe('putBucketACL API', () => {
}); });
}); });
it('should return an error if invalid email ' + it('should return an error if invalid email '
'provided in ACL header request', done => { + 'provided in ACL header request', done => {
const testACLRequest = { const testACLRequest = {
bucketName, bucketName,
namespace, namespace,
headers: { headers: {
'host': `${bucketName}.s3.amazonaws.com`, 'host': `${bucketName}.s3.amazonaws.com`,
'x-amz-grant-full-control': 'x-amz-grant-full-control':
'emailaddress="sampleaccount1@sampling.com"' + 'emailaddress="sampleaccount1@sampling.com"'
',emailaddress="nonexistentEmail@sampling.com"', + ',emailaddress="nonexistentEmail@sampling.com"',
}, },
url: '/?acl', url: '/?acl',
query: { acl: '' }, query: { acl: '' },
iamAuthzResults: false,
}; };
bucketPutACL(authInfo, testACLRequest, log, err => { bucketPutACL(authInfo, testACLRequest, log, err => {
@ -305,52 +313,53 @@ describe('putBucketACL API', () => {
bucketName, bucketName,
namespace, namespace,
headers: { host: `${bucketName}.s3.amazonaws.com` }, headers: { host: `${bucketName}.s3.amazonaws.com` },
post: '<AccessControlPolicy xmlns=' + post: '<AccessControlPolicy xmlns='
'"http://s3.amazonaws.com/doc/2006-03-01/">' + + '"http://s3.amazonaws.com/doc/2006-03-01/">'
'<Owner>' + + '<Owner>'
'<ID>79a59df900b949e55d96a1e698fbaced' + + '<ID>79a59df900b949e55d96a1e698fbaced'
'fd6e09d98eacf8f8d5218e7cd47ef2be</ID>' + + 'fd6e09d98eacf8f8d5218e7cd47ef2be</ID>'
'<DisplayName>OwnerDisplayName</DisplayName>' + + '<DisplayName>OwnerDisplayName</DisplayName>'
'</Owner>' + + '</Owner>'
'<AccessControlList>' + + '<AccessControlList>'
'<Grant>' + + '<Grant>'
'<Grantee xsi:type="CanonicalUser">' + + '<Grantee xsi:type="CanonicalUser">'
'<ID>79a59df900b949e55d96a1e698fbaced' + + '<ID>79a59df900b949e55d96a1e698fbaced'
'fd6e09d98eacf8f8d5218e7cd47ef2be</ID>' + + 'fd6e09d98eacf8f8d5218e7cd47ef2be</ID>'
'<DisplayName>OwnerDisplayName</DisplayName>' + + '<DisplayName>OwnerDisplayName</DisplayName>'
'</Grantee>' + + '</Grantee>'
'<Permission>FULL_CONTROL</Permission>' + + '<Permission>FULL_CONTROL</Permission>'
'</Grant>' + + '</Grant>'
'<Grant>' + + '<Grant>'
'<Grantee xsi:type="Group">' + + '<Grantee xsi:type="Group">'
`<URI>${constants.publicId}</URI>` + + `<URI>${constants.publicId}</URI>`
'</Grantee>' + + '</Grantee>'
'<Permission>READ</Permission>' + + '<Permission>READ</Permission>'
'</Grant>' + + '</Grant>'
'<Grant>' + + '<Grant>'
'<Grantee xsi:type="Group">' + + '<Grantee xsi:type="Group">'
`<URI>${constants.logId}</URI>` + + `<URI>${constants.logId}</URI>`
'</Grantee>' + + '</Grantee>'
'<Permission>WRITE</Permission>' + + '<Permission>WRITE</Permission>'
'</Grant>' + + '</Grant>'
'<Grant>' + + '<Grant>'
'<Grantee xsi:type="AmazonCustomerByEmail">' + + '<Grantee xsi:type="AmazonCustomerByEmail">'
'<EmailAddress>sampleaccount1@sampling.com' + + '<EmailAddress>sampleaccount1@sampling.com'
'</EmailAddress>' + + '</EmailAddress>'
'</Grantee>' + + '</Grantee>'
'<Permission>WRITE_ACP</Permission>' + + '<Permission>WRITE_ACP</Permission>'
'</Grant>' + + '</Grant>'
'<Grant>' + + '<Grant>'
'<Grantee xsi:type="CanonicalUser">' + + '<Grantee xsi:type="CanonicalUser">'
'<ID>79a59df900b949e55d96a1e698fbacedfd' + + '<ID>79a59df900b949e55d96a1e698fbacedfd'
'6e09d98eacf8f8d5218e7cd47ef2bf</ID>' + + '6e09d98eacf8f8d5218e7cd47ef2bf</ID>'
'</Grantee>' + + '</Grantee>'
'<Permission>READ_ACP</Permission>' + + '<Permission>READ_ACP</Permission>'
'</Grant>' + + '</Grant>'
'</AccessControlList>' + + '</AccessControlList>'
'</AccessControlPolicy>', + '</AccessControlPolicy>',
url: '/?acl', url: '/?acl',
query: { acl: '' }, query: { acl: '' },
iamAuthzResults: false,
}; };
bucketPutACL(authInfo, testACLRequest, log, err => { bucketPutACL(authInfo, testACLRequest, log, err => {
@ -362,7 +371,7 @@ describe('putBucketACL API', () => {
assert.strictEqual(md.getAcl().READ[0], constants.publicId); assert.strictEqual(md.getAcl().READ[0], constants.publicId);
assert.strictEqual(md.getAcl().WRITE[0], constants.logId); assert.strictEqual(md.getAcl().WRITE[0], constants.logId);
assert.strictEqual(md.getAcl().WRITE_ACP[0], assert.strictEqual(md.getAcl().WRITE_ACP[0],
canonicalIDforSample1); canonicalIDforSample1);
assert.strictEqual(md.getAcl().READ_ACP[0], assert.strictEqual(md.getAcl().READ_ACP[0],
canonicalIDforSample2); canonicalIDforSample2);
done(); done();
@ -375,17 +384,18 @@ describe('putBucketACL API', () => {
bucketName, bucketName,
namespace, namespace,
headers: { host: `${bucketName}.s3.amazonaws.com` }, headers: { host: `${bucketName}.s3.amazonaws.com` },
post: '<AccessControlPolicy xmlns=' + post: '<AccessControlPolicy xmlns='
'"http://s3.amazonaws.com/doc/2006-03-01/">' + + '"http://s3.amazonaws.com/doc/2006-03-01/">'
'<Owner>' + + '<Owner>'
'<ID>79a59df900b949e55d96a1e698fbaced' + + '<ID>79a59df900b949e55d96a1e698fbaced'
'fd6e09d98eacf8f8d5218e7cd47ef2be</ID>' + + 'fd6e09d98eacf8f8d5218e7cd47ef2be</ID>'
'<DisplayName>OwnerDisplayName</DisplayName>' + + '<DisplayName>OwnerDisplayName</DisplayName>'
'</Owner>' + + '</Owner>'
'<AccessControlList></AccessControlList>' + + '<AccessControlList></AccessControlList>'
'</AccessControlPolicy>', + '</AccessControlPolicy>',
url: '/?acl', url: '/?acl',
query: { acl: '' }, query: { acl: '' },
iamAuthzResults: false,
}; };
bucketPutACL(authInfo, testACLRequest, log, err => { bucketPutACL(authInfo, testACLRequest, log, err => {
@ -403,64 +413,66 @@ describe('putBucketACL API', () => {
}); });
it('should not be able to set ACLs without AccessControlList section', it('should not be able to set ACLs without AccessControlList section',
done => { done => {
const testACLRequest = { const testACLRequest = {
bucketName, bucketName,
namespace, namespace,
headers: { host: `${bucketName}.s3.amazonaws.com` }, headers: { host: `${bucketName}.s3.amazonaws.com` },
post: '<AccessControlPolicy xmlns=' + post: '<AccessControlPolicy xmlns='
'"http://s3.amazonaws.com/doc/2006-03-01/">' + + '"http://s3.amazonaws.com/doc/2006-03-01/">'
'<Owner>' + + '<Owner>'
'<ID>79a59df900b949e55d96a1e698fbaced' + + '<ID>79a59df900b949e55d96a1e698fbaced'
                + 'fd6e09d98eacf8f8d5218e7cd47ef2be</ID>'
                + '<DisplayName>OwnerDisplayName</DisplayName>'
                + '</Owner>'
                + '</AccessControlPolicy>',
            url: '/?acl',
            query: { acl: '' },
            iamAuthzResults: false,
        };
        bucketPutACL(authInfo, testACLRequest, log, err => {
            assert.deepStrictEqual(err, errors.MalformedACLError);
            done();
        });
    });
    it('should return an error if multiple AccessControlList section', done => {
        const testACLRequest = {
            bucketName,
            namespace,
            headers: { host: `${bucketName}.s3.amazonaws.com` },
            post: '<AccessControlPolicy xmlns='
                + '"http://s3.amazonaws.com/doc/2006-03-01/">'
                + '<Owner>'
                + '<ID>79a59df900b949e55d96a1e698fbaced'
                + 'fd6e09d98eacf8f8d5218e7cd47ef2be</ID>'
                + '<DisplayName>OwnerDisplayName</DisplayName>'
                + '</Owner>'
                + '<AccessControlList>'
                + '<Grant>'
                + '<Grantee xsi:type="CanonicalUser">'
                + '<ID>79a59df900b949e55d96a1e698fbaced'
                + 'fd6e09d98eacf8f8d5218e7cd47ef2be</ID>'
                + '<DisplayName>OwnerDisplayName</DisplayName>'
                + '</Grantee>'
                + '<Permission>FULL_CONTROL</Permission>'
                + '</Grant>'
                + '</AccessControlList>'
                + '<AccessControlList>'
                + '<Grant>'
                + '<Grantee xsi:type="CanonicalUser">'
                + '<ID>79a59df900b949e55d96a1e698fbaced'
                + 'fd6e09d98eacf8f8d5218e7cd47ef2be</ID>'
                + '<DisplayName>OwnerDisplayName</DisplayName>'
                + '</Grantee>'
                + '<Permission>READ</Permission>'
                + '</Grant>'
                + '</AccessControlList>'
                + '</AccessControlPolicy>',
            url: '/?acl',
            query: { acl: '' },
            iamAuthzResults: false,
        };
        bucketPutACL(authInfo, testACLRequest, log, err => {
@ -469,30 +481,31 @@ describe('putBucketACL API', () => {
        });
    });
    it('should return an error if invalid grantee user ID '
        + 'provided in ACL request body', done => {
        const testACLRequest = {
            bucketName,
            namespace,
            headers: { host: `${bucketName}.s3.amazonaws.com` },
            post: '<AccessControlPolicy xmlns='
                + '"http://s3.amazonaws.com/doc/2006-03-01/">'
                + '<Owner>'
                + '<ID>79a59df900b949e55d96a1e698fbaced'
                + 'fd6e09d98eacf8f8d5218e7cd47ef2be</ID>'
                + '<DisplayName>OwnerDisplayName</DisplayName>'
                + '</Owner>'
                + '<AccessControlList>'
                + '<Grant>'
                + '<Grantee xsi:type="CanonicalUser">'
                + '<ID>invalid_id</ID>'
                + '</Grantee>'
                + '<Permission>READ_ACP</Permission>'
                + '</Grant>'
                + '</AccessControlList>'
                + '</AccessControlPolicy>',
            url: '/?acl',
            query: { acl: '' },
            iamAuthzResults: false,
        };
        return bucketPutACL(authInfo, testACLRequest, log, err => {
@ -501,30 +514,31 @@ describe('putBucketACL API', () => {
        });
    });
    it('should return an error if invalid email '
        + 'address provided in ACLs set out in request body', done => {
        const testACLRequest = {
            bucketName,
            namespace,
            headers: { host: `${bucketName}.s3.amazonaws.com` },
            post: '<AccessControlPolicy xmlns='
                + '"http://s3.amazonaws.com/doc/2006-03-01/">'
                + '<Owner>'
                + '<ID>79a59df900b949e55d96a1e698fbaced'
                + 'fd6e09d98eacf8f8d5218e7cd47ef2be</ID>'
                + '<DisplayName>OwnerDisplayName</DisplayName>'
                + '</Owner>'
                + '<AccessControlList>'
                + '<Grant>'
                + '<Grantee xsi:type="AmazonCustomerByEmail">'
                + '<EmailAddress>xyz@amazon.com</EmailAddress>'
                + '</Grantee>'
                + '<Permission>WRITE_ACP</Permission>'
                + '</Grant>'
                + '</AccessControlList>'
                + '</AccessControlPolicy>',
            url: '/?acl',
            query: { acl: '' },
            iamAuthzResults: false,
        };
        bucketPutACL(authInfo, testACLRequest, log, err => {
            assert.deepStrictEqual(err, errors.UnresolvableGrantByEmailAddress);
@ -542,24 +556,25 @@ describe('putBucketACL API', () => {
             * "Grant" which is part of the s3 xml scheme for ACLs
             * so an error should be returned
             */
            post: '<AccessControlPolicy xmlns='
                + '"http://s3.amazonaws.com/doc/2006-03-01/">'
                + '<Owner>'
                + '<ID>79a59df900b949e55d96a1e698fbaced'
                + 'fd6e09d98eacf8f8d5218e7cd47ef2be</ID>'
                + '<DisplayName>OwnerDisplayName</DisplayName>'
                + '</Owner>'
                + '<AccessControlList>'
                + '<PowerGrant>'
                + '<Grantee xsi:type="AmazonCustomerByEmail">'
                + '<EmailAddress>xyz@amazon.com</EmailAddress>'
                + '</Grantee>'
                + '<Permission>WRITE_ACP</Permission>'
                + '</PowerGrant>'
                + '</AccessControlList>'
                + '</AccessControlPolicy>',
            url: '/?acl',
            query: { acl: '' },
            iamAuthzResults: false,
        };
        bucketPutACL(authInfo, testACLRequest, log, err => {
@ -579,32 +594,33 @@ describe('putBucketACL API', () => {
             * "Grant" which is part of the s3 xml scheme for ACLs
             * so an error should be returned
             */
            post: '<AccessControlPolicy xmlns='
                + '"http://s3.amazonaws.com/doc/2006-03-01/">'
                + '<Owner>'
                + '<ID>79a59df900b949e55d96a1e698fbaced'
                + 'fd6e09d98eacf8f8d5218e7cd47ef2be</ID>'
                + '<DisplayName>OwnerDisplayName</DisplayName>'
                + '</Owner>'
                + '<AccessControlList>'
                + '<Grant>'
                + '<Grantee xsi:type="CanonicalUser">'
                + '<ID>79a59df900b949e55d96a1e698fbaced'
                + 'fd6e09d98eacf8f8d5218e7cd47ef2be</ID>'
                + '<DisplayName>OwnerDisplayName</DisplayName>'
                + '</Grantee>'
                + '<Permission>FULL_CONTROL</Permission>'
                + '</Grant>'
                + '<PowerGrant>'
                + '<Grantee xsi:type="AmazonCustomerByEmail">'
                + '<EmailAddress>xyz@amazon.com</EmailAddress>'
                + '</Grantee>'
                + '<Permission>WRITE_ACP</Permission>'
                + '</PowerGrant>'
                + '</AccessControlList>'
                + '</AccessControlPolicy>',
            url: '/?acl',
            query: { acl: '' },
            iamAuthzResults: false,
        };
        bucketPutACL(authInfo, testACLRequest, log, err => {
@ -622,24 +638,25 @@ describe('putBucketACL API', () => {
            // so an error should be returned
            post: {
                '<AccessControlPolicy xmlns':
                    '"http://s3.amazonaws.com/doc/2006-03-01/">'
                    + '<Owner>'
                    + '<ID>79a59df900b949e55d96a1e698fbaced'
                    + 'fd6e09d98eacf8f8d5218e7cd47ef2be</ID>'
                    + '<DisplayName>OwnerDisplayName</DisplayName>'
                    + '<Owner>'
                    + '<AccessControlList>'
                    + '<Grant>'
                    + '<Grantee xsi:type="AmazonCustomerByEmail">'
                    + '<EmailAddress>xyz@amazon.com</EmailAddress>'
                    + '<Grantee>'
                    + '<Permission>WRITE_ACP</Permission>'
                    + '<Grant>'
                    + '<AccessControlList>'
                    + '<AccessControlPolicy>',
            },
            url: '/?acl',
            query: { acl: '' },
            iamAuthzResults: false,
        };
        bucketPutACL(authInfo, testACLRequest, log, err => {
@ -648,32 +665,33 @@ describe('putBucketACL API', () => {
        });
    });
    it('should return an error if invalid group '
        + 'uri provided in ACLs set out in request body', done => {
        const testACLRequest = {
            bucketName,
            namespace,
            headers: { host: `${bucketName}.s3.amazonaws.com` },
            // URI in grant below is not valid group URI for s3
            post: '<AccessControlPolicy xmlns='
                + '"http://s3.amazonaws.com/doc/2006-03-01/">'
                + '<Owner>'
                + '<ID>79a59df900b949e55d96a1e698fbaced'
                + 'fd6e09d98eacf8f8d5218e7cd47ef2be</ID>'
                + '<DisplayName>OwnerDisplayName</DisplayName>'
                + '</Owner>'
                + '<AccessControlList>'
                + '<Grant>'
                + '<Grantee xsi:type="Group">'
                + '<URI>http://acs.amazonaws.com/groups/'
                + 'global/NOTAVALIDGROUP</URI>'
                + '</Grantee>'
                + '<Permission>READ</Permission>'
                + '</Grant>'
                + '</AccessControlList>'
                + '</AccessControlPolicy>',
            url: '/?acl',
            query: { acl: '' },
            iamAuthzResults: false,
        };
        bucketPutACL(authInfo, testACLRequest, log, err => {
@ -682,19 +700,20 @@ describe('putBucketACL API', () => {
        });
    });
    it('should return an error if invalid group uri'
        + 'provided in ACL header request', done => {
        const testACLRequest = {
            bucketName,
            namespace,
            headers: {
                'host': `${bucketName}.s3.amazonaws.com`,
                'x-amz-grant-full-control':
                    'uri="http://acs.amazonaws.com/groups/'
                    + 'global/NOTAVALIDGROUP"',
            },
            url: '/?acl',
            query: { acl: '' },
            iamAuthzResults: false,
        };
        bucketPutACL(authInfo, testACLRequest, log, err => {
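
A note on the repeated `iamAuthzResults: false` stanza in the suites above: each request literal now carries the flag by hand. A small factory could stamp it in one place; this is only a sketch, and `makeAclTestRequest` is a hypothetical helper, not part of the change set.

    function makeAclTestRequest(post, overrides = {}) {
        // Hypothetical helper (illustration only): builds the ACL test request
        // shape used above, with the authorization flag applied consistently.
        return {
            bucketName,
            namespace,
            headers: { host: `${bucketName}.s3.amazonaws.com` },
            post,
            url: '/?acl',
            query: { acl: '' },
            iamAuthzResults: false,
            ...overrides,
        };
    }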
View File

@ -3,13 +3,13 @@ const { errors } = require('arsenal');
const { bucketPut } = require('../../../lib/api/bucketPut');
const bucketPutCors = require('../../../lib/api/bucketPutCors');
const { _validator, parseCorsXml } = require('../../../lib/api/apiUtils/bucket/bucketCors');
const {
    cleanup,
    DummyRequestLogger,
    makeAuthInfo,
    CorsConfigTester,
} = require('../helpers');
const metadata = require('../../../lib/metadata/wrapper');
const log = new DummyRequestLogger();
@ -19,6 +19,7 @@ const testBucketPutRequest = {
    bucketName,
    headers: { host: `${bucketName}.s3.amazonaws.com` },
    url: '/',
    iamAuthzResults: false,
};
function _testPutBucketCors(authInfo, request, log, errCode, cb) {
@ -30,13 +31,13 @@ function _testPutBucketCors(authInfo, request, log, errCode, cb) {
}
function _generateSampleXml(value) {
    const xml = '<CORSConfiguration>'
        + '<CORSRule>'
        + '<AllowedMethod>PUT</AllowedMethod>'
        + '<AllowedOrigin>www.example.com</AllowedOrigin>'
        + `${value}`
        + '</CORSRule>'
        + '</CORSConfiguration>';
    return xml;
}
@ -125,8 +126,8 @@ describe('PUT bucket cors :: helper validation functions ', () => {
    it('should return MalformedXML if more than one ID per rule', done => {
        const testValue = 'testid';
        const xml = _generateSampleXml(`<ID>${testValue}</ID>`
            + `<ID>${testValue}</ID>`);
        parseCorsXml(xml, log, err => {
            assert(err, 'Expected error but found none');
            assert.deepStrictEqual(err, errors.MalformedXML);
@ -157,8 +158,8 @@ describe('PUT bucket cors :: helper validation functions ', () => {
    describe('validateMaxAgeSeconds ', () => {
        it('should validate successfully for valid value', done => {
            const testValue = 60;
            const xml = _generateSampleXml(`<MaxAgeSeconds>${testValue}`
                + '</MaxAgeSeconds>');
            parseCorsXml(xml, log, (err, result) => {
                assert.strictEqual(err, null, `Found unexpected err ${err}`);
                assert.strictEqual(typeof result[0].maxAgeSeconds, 'number');
@ -167,12 +168,13 @@ describe('PUT bucket cors :: helper validation functions ', () => {
            });
        });
        it('should return MalformedXML if more than one MaxAgeSeconds '
            + 'per rule', done => {
            const testValue = '60';
            const xml = _generateSampleXml(
                `<MaxAgeSeconds>${testValue}</MaxAgeSeconds>`
                + `<MaxAgeSeconds>${testValue}</MaxAgeSeconds>`,
            );
            parseCorsXml(xml, log, err => {
                assert(err, 'Expected error but found none');
                assert.deepStrictEqual(err, errors.MalformedXML);
@ -182,8 +184,8 @@ describe('PUT bucket cors :: helper validation functions ', () => {
        it('should validate & return undefined if empty value', done => {
            const testValue = '';
            const xml = _generateSampleXml(`<MaxAgeSeconds>${testValue}`
                + '</MaxAgeSeconds>');
            parseCorsXml(xml, log, (err, result) => {
                assert.strictEqual(err, null, `Found unexpected err ${err}`);
                assert.strictEqual(result[0].MaxAgeSeconds, undefined);
View File

@ -14,6 +14,7 @@ const bucketPutRequest = {
    bucketName,
    headers: { host: `${bucketName}.s3.amazonaws.com` },
    url: '/',
    iamAuthzResults: false,
};
describe('bucketPutEncryption API', () => {
@ -32,10 +33,11 @@ describe('bucketPutEncryption API', () => {
    it('should reject a config with no Rule', done => {
        bucketPutEncryption(authInfo, templateRequest(bucketName,
            {
                post: `<?xml version="1.0" encoding="UTF-8"?>
                <ServerSideEncryptionConfiguration xmlns="http://s3.amazonaws.com/doc/2006-03-01/">
                </ServerSideEncryptionConfiguration>`,
            }), log, err => {
            assert.strictEqual(err.is.MalformedXML, true);
            done();
        });
@ -43,11 +45,12 @@ describe('bucketPutEncryption API', () => {
    it('should reject a config with no ApplyServerSideEncryptionByDefault section', done => {
        bucketPutEncryption(authInfo, templateRequest(bucketName,
            {
                post: `<?xml version="1.0" encoding="UTF-8"?>
                <ServerSideEncryptionConfiguration xmlns="http://s3.amazonaws.com/doc/2006-03-01/">
                <Rule></Rule>
                </ServerSideEncryptionConfiguration>`,
            }), log, err => {
            assert.strictEqual(err.is.MalformedXML, true);
            done();
        });
@ -155,33 +158,32 @@ describe('bucketPutEncryption API', () => {
        });
    });
    it('should update SSEAlgorithm if existing SSEAlgorithm is AES256, '
        + 'new SSEAlgorithm is aws:kms and no KMSMasterKeyID is provided',
    done => {
        const post = templateSSEConfig({ algorithm: 'AES256' });
        bucketPutEncryption(authInfo, templateRequest(bucketName, { post }), log, err => {
            assert.ifError(err);
            return getSSEConfig(bucketName, log, (err, sseInfo) => {
                assert.ifError(err);
                const { masterKeyId } = sseInfo;
                const newConf = templateSSEConfig({ algorithm: 'aws:kms' });
                return bucketPutEncryption(authInfo, templateRequest(bucketName, { post: newConf }), log,
                    err => {
                        assert.ifError(err);
                        return getSSEConfig(bucketName, log, (err, updatedSSEInfo) => {
                            assert.deepStrictEqual(updatedSSEInfo, {
                                mandatory: true,
                                algorithm: 'aws:kms',
                                cryptoScheme: 1,
                                masterKeyId,
                            });
                            done();
                        });
                    });
            });
        });
    });
    it('should update SSEAlgorithm to aws:kms and set KMSMasterKeyID', done => {
        const post = templateSSEConfig({ algorithm: 'AES256' });
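
The deeply nested callbacks in the SSE-update test above could also be written with `util.promisify`. A minimal sketch, assuming the same `templateSSEConfig`, `templateRequest` and `getSSEConfig` helpers already defined in this file (an alternative formulation, not part of the change set):

    const { promisify } = require('util');

    const putEnc = promisify(bucketPutEncryption);
    const getEnc = promisify(getSSEConfig);

    it('should keep the master key when switching AES256 to aws:kms', async () => {
        // put AES256 first, remember the generated master key
        await putEnc(authInfo, templateRequest(bucketName,
            { post: templateSSEConfig({ algorithm: 'AES256' }) }), log);
        const { masterKeyId } = await getEnc(bucketName, log);
        // switch to aws:kms without a KMSMasterKeyID and re-read the config
        await putEnc(authInfo, templateRequest(bucketName,
            { post: templateSSEConfig({ algorithm: 'aws:kms' }) }), log);
        const updated = await getEnc(bucketName, log);
        assert.deepStrictEqual(updated,
            { mandatory: true, algorithm: 'aws:kms', cryptoScheme: 1, masterKeyId });
    });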
View File

@ -17,6 +17,7 @@ const testBucketPutRequest = {
    bucketName,
    headers: { host: `${bucketName}.s3.amazonaws.com` },
    url: '/',
    iamAuthzResults: false,
};
const expectedLifecycleConfig = {
View File

@ -15,6 +15,7 @@ const bucketPutRequest = {
    bucketName,
    headers: { host: `${bucketName}.s3.amazonaws.com` },
    url: '/',
    iamAuthzResults: false,
};
const expectedNotifConfig = {
@ -52,6 +53,7 @@ function getNotifRequest(empty) {
            host: `${bucketName}.s3.amazonaws.com`,
        },
        post: notifXml,
        iamAuthzResults: false,
    };
    return putNotifConfigRequest;
}
View File

@ -15,6 +15,7 @@ const bucketPutRequest = {
    bucketName,
    headers: { host: `${bucketName}.s3.amazonaws.com` },
    url: '/',
    iamAuthzResults: false,
};
const objectLockXml = '<ObjectLockConfiguration ' +
@ -30,6 +31,7 @@ const putObjLockRequest = {
    bucketName,
    headers: { host: `${bucketName}.s3.amazonaws.com` },
    post: objectLockXml,
    iamAuthzResults: false,
};
const expectedObjectLockConfig = {
View File

@ -15,6 +15,7 @@ const testBucketPutRequest = {
    bucketName,
    headers: { host: `${bucketName}.s3.amazonaws.com` },
    url: '/',
    iamAuthzResults: false,
};
let expectedBucketPolicy = {};
@ -25,6 +26,7 @@ function getPolicyRequest(policy) {
            host: `${bucketName}.s3.amazonaws.com`,
        },
        post: JSON.stringify(policy),
        iamAuthzResults: false,
    };
}
@ -76,7 +78,7 @@ describe('putBucketPolicy API', () => {
        });
    });
    it.skip('should return error if policy contains conditions', done => {
        expectedBucketPolicy.Statement[0].Condition =
            { StringEquals: { 's3:x-amz-acl': ['public-read'] } };
        bucketPutPolicy(authInfo, getPolicyRequest(expectedBucketPolicy), log,
View File

@ -19,6 +19,7 @@ const testBucketPutRequest = {
    bucketName,
    headers: { host: `${bucketName}.s3.amazonaws.com` },
    url: '/',
    iamAuthzResults: false,
};
function _getPutWebsiteRequest(xml) {
@ -29,6 +30,7 @@ function _getPutWebsiteRequest(xml) {
        },
        url: '/?website',
        query: { website: '' },
        iamAuthzResults: false,
    };
    request.post = xml;
    return request;
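
`_getPutWebsiteRequest` above takes the website configuration XML as its only argument. For reference, a minimal body of the kind these tests typically feed it looks like the following; this is a sketch of the standard S3 website XML, not text taken from the diff, written in the operator-first concatenation style adopted by this change:

    const sampleWebsiteXml = '<WebsiteConfiguration xmlns='
        + '"http://s3.amazonaws.com/doc/2006-03-01/">'
        + '<IndexDocument><Suffix>index.html</Suffix></IndexDocument>'
        + '<ErrorDocument><Key>error.html</Key></ErrorDocument>'
        + '</WebsiteConfiguration>';
    // builds a request carrying iamAuthzResults: false, like the fixtures above
    const testPutWebsiteRequest = _getPutWebsiteRequest(sampleWebsiteXml);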
View File

@ -47,6 +47,7 @@ describe('objectGet API', () => {
        namespace,
        headers: {},
        url: `/${bucketName}`,
        iamAuthzResults: false,
    };
    const userMetadataKey = 'x-amz-meta-test';
    const userMetadataValue = 'some metadata';
@ -56,6 +57,7 @@ describe('objectGet API', () => {
        objectKey: objectName,
        headers: {},
        url: `/${bucketName}/${objectName}`,
        iamAuthzResults: false,
    };
    it('should get the object metadata', done => {
@ -84,6 +86,7 @@ describe('objectGet API', () => {
            'x-amz-bucket-object-lock-enabled': 'true',
        },
        url: `/${bucketName}`,
        iamAuthzResults: false,
    };
    const createPutDummyRetention = (date, mode) => new DummyRequest({
@ -245,6 +248,7 @@ describe('objectGet API', () => {
            objectKey: objectName,
            headers: { host: `${bucketName}.s3.amazonaws.com` },
            url: `/${objectName}?uploads`,
            iamAuthzResults: false,
        };
        async.waterfall([
            next => bucketPut(authInfo, testPutBucketRequest, log, next),
@ -321,6 +325,7 @@ describe('objectGet API', () => {
                headers: { host: `${bucketName}.s3.amazonaws.com` },
                query: { uploadId: testUploadId },
                post: completeBody,
                iamAuthzResults: false,
            };
            completeMultipartUpload(authInfo, completeRequest,
                log, err => {
View File

@ -36,6 +36,7 @@ describe('objectGetACL API', () => {
            'x-amz-acl': 'public-read-write',
        },
        url: '/',
        iamAuthzResults: false,
    };
    const testGetACLRequest = {
        bucketName,
@ -44,6 +45,7 @@ describe('objectGetACL API', () => {
        objectKey: objectName,
        url: `/${bucketName}/${objectName}?acl`,
        query: { acl: '' },
        iamAuthzResults: false,
    };
    it('should get a canned private ACL', done => {
View File

@ -18,6 +18,7 @@ const bucketPutRequest = {
    bucketName,
    headers: { host: `${bucketName}.s3.amazonaws.com` },
    url: '/',
    iamAuthzResults: false,
};
const putObjectRequest = new DummyRequest({
@ -37,12 +38,14 @@ const putObjectLegalHoldRequest = status => ({
    objectKey: objectName,
    headers: { host: `${bucketName}.s3.amazonaws.com` },
    post: objectLegalHoldXml(status),
    iamAuthzResults: false,
});
const getObjectLegalHoldRequest = {
    bucketName,
    objectKey: objectName,
    headers: { host: `${bucketName}.s3.amazonaws.com` },
    iamAuthzResults: false,
};
describe('getObjectLegalHold API', () => {
View File

@ -21,6 +21,7 @@ const bucketPutRequest = {
    bucketName,
    headers: { host: `${bucketName}.s3.amazonaws.com` },
    url: '/',
    iamAuthzResults: false,
};
const putObjectRequest = new DummyRequest({
@ -42,12 +43,14 @@ const putObjRetRequest = {
    objectKey: objectName,
    headers: { host: `${bucketName}.s3.amazonaws.com` },
    post: objectRetentionXml,
    iamAuthzResults: false,
};
const getObjRetRequest = {
    bucketName,
    objectKey: objectName,
    headers: { host: `${bucketName}.s3.amazonaws.com` },
    iamAuthzResults: false,
};
describe('getObjectRetention API', () => {
View File

@ -21,6 +21,7 @@ const testBucketPutRequest = {
    bucketName,
    headers: { host: `${bucketName}.s3.amazonaws.com` },
    url: '/',
    iamAuthzResults: false,
};
const testPutObjectRequest = new DummyRequest({
View File

@ -4,14 +4,16 @@ const moment = require('moment');
const { errors, s3middleware } = require('arsenal');
const sinon = require('sinon');
const { ds } = require('arsenal').storage.data.inMemory.datastore;
const { bucketPut } = require('../../../lib/api/bucketPut');
const bucketPutObjectLock = require('../../../lib/api/bucketPutObjectLock');
const bucketPutACL = require('../../../lib/api/bucketPutACL');
const bucketPutVersioning = require('../../../lib/api/bucketPutVersioning');
const { parseTagFromQuery } = s3middleware.tagging;
const {
    cleanup, DummyRequestLogger, makeAuthInfo, versioningTestUtils,
} = require('../helpers');
const metadata = require('../metadataswitch');
const objectPut = require('../../../lib/api/objectPut');
const { objectLockTestUtils } = require('../helpers');
@ -19,7 +21,7 @@ const DummyRequest = require('../DummyRequest');
const mpuUtils = require('../utils/mpuUtils');
const { lastModifiedHeader } = require('../../../constants');
const { any } = sinon.match;
const log = new DummyRequestLogger();
const canonicalID = 'accessKey1';
@ -49,10 +51,8 @@ const originalputObjectMD = metadata.putObjectMD;
const objectName = 'objectName';
let testPutObjectRequest;
const enableVersioningRequest = versioningTestUtils.createBucketPutVersioningReq(bucketName, 'Enabled');
const suspendVersioningRequest = versioningTestUtils.createBucketPutVersioningReq(bucketName, 'Suspended');
function testAuth(bucketOwner, authUser, bucketPutReq, log, cb) {
    bucketPut(bucketOwner, bucketPutReq, log, () => {
@ -74,8 +74,10 @@ describe('parseTagFromQuery', () => {
    const allowedChar = '+- =._:/';
    const tests = [
        { tagging: 'key1=value1', result: { key1: 'value1' } },
        {
            tagging: `key1=${encodeURIComponent(allowedChar)}`,
            result: { key1: allowedChar },
        },
        { tagging: 'key1=value1=value2', error: invalidArgument },
        { tagging: '=value1', error: invalidArgument },
        { tagging: 'key1%=value1', error: invalidArgument },
@ -143,16 +145,14 @@ describe('objectPut API', () => {
    it('should put object if user has FULL_CONTROL grant on bucket', done => {
        const bucketOwner = makeAuthInfo('accessKey2');
        const authUser = makeAuthInfo('accessKey3');
        testPutBucketRequest.headers['x-amz-grant-full-control'] = `id=${authUser.getCanonicalID()}`;
        testAuth(bucketOwner, authUser, testPutBucketRequest, log, done);
    });
    it('should put object if user has WRITE grant on bucket', done => {
        const bucketOwner = makeAuthInfo('accessKey2');
        const authUser = makeAuthInfo('accessKey3');
        testPutBucketRequest.headers['x-amz-grant-write'] = `id=${authUser.getCanonicalID()}`;
        testAuth(bucketOwner, authUser, testPutBucketRequest, log, done);
    });
@ -183,7 +183,7 @@ describe('objectPut API', () => {
            {}, log, (err, md) => {
                assert(md);
                assert
                    .strictEqual(md['content-md5'], correctMD5);
                done();
            });
        });
@ -240,8 +240,8 @@ describe('objectPut API', () => {
    ];
    testObjectLockConfigs.forEach(config => {
        const { testMode, type, val } = config;
        it('should put an object with default retention if object does not '
            + 'have retention configuration but bucket has', done => {
            const testPutObjectRequest = new DummyRequest({
                bucketName,
                namespace,
@ -255,6 +255,7 @@ describe('objectPut API', () => {
                bucketName,
                headers: { host: `${bucketName}.s3.amazonaws.com` },
                post: objectLockTestUtils.generateXml(testMode, val, type),
                iamAuthzResults: false,
            };
            bucketPut(authInfo, testPutBucketRequestLock, log, () => {
@ -268,10 +269,8 @@ describe('objectPut API', () => {
                        const mode = md.retentionMode;
                        const retainDate = md.retentionDate;
                        const date = moment();
                        const days = type === 'Days' ? val : val * 365;
                        const expectedDate = date.add(days, 'days');
                        assert.ifError(err);
                        assert.strictEqual(mode, testMode);
                        assert.strictEqual(formatTime(retainDate),
@ -365,11 +364,11 @@ describe('objectPut API', () => {
                    (err, md) => {
                        assert(md);
                        assert.strictEqual(md['x-amz-meta-test'],
                            'some metadata');
                        assert.strictEqual(md['x-amz-meta-test2'],
                            'some more metadata');
                        assert.strictEqual(md['x-amz-meta-test3'],
                            'even more metadata');
                        done();
                    });
            });
@ -438,7 +437,7 @@ describe('objectPut API', () => {
                    (err, md) => {
                        assert(md);
                        assert.strictEqual(md['x-amz-meta-x-scal-last-modified'],
                            imposedLastModified);
                        const lastModified = md['last-modified'];
                        const lastModifiedDate = lastModified.split('T')[0];
                        const currentTs = new Date().toJSON();
@ -478,11 +477,11 @@ describe('objectPut API', () => {
                        assert(md);
                        assert.strictEqual(md.location, null);
                        assert.strictEqual(md['x-amz-meta-test'],
                            'some metadata');
                        assert.strictEqual(md['x-amz-meta-test2'],
                            'some more metadata');
                        assert.strictEqual(md['x-amz-meta-test3'],
                            'even more metadata');
                        done();
                    });
            });
@ -503,19 +502,19 @@ describe('objectPut API', () => {
                undefined, log, () => {
                    objectPut(authInfo, testPutObjectRequest2, undefined,
                        log,
                        () => {
                            // orphan objects don't get deleted
                            // until the next tick
                            // in memory
                            setImmediate(() => {
                                // Data store starts at index 1
                                assert.strictEqual(ds[0], undefined);
                                assert.strictEqual(ds[1], undefined);
                                assert.deepStrictEqual(ds[2].value,
                                    Buffer.from('I am another body', 'utf8'));
                                done();
                            });
                        });
                });
        });
    });
@ -534,8 +533,8 @@ describe('objectPut API', () => {
        });
    });
    it('should not put object with retention configuration if object lock '
        + 'is not enabled on the bucket', done => {
        const testPutObjectRequest = new DummyRequest({
            bucketName,
            namespace,
@ -552,15 +551,14 @@ describe('objectPut API', () => {
            objectPut(authInfo, testPutObjectRequest, undefined, log, err => {
                assert.deepStrictEqual(err, errors.InvalidRequest
                    .customizeDescription(
                        'Bucket is missing ObjectLockConfiguration',
                    ));
                done();
            });
        });
    });
    it('should forward a 400 back to client on metadata 408 response', () => {
        metadata.putObjectMD = (bucketName, objName, objVal, params, log, cb) => cb({ httpCode: 408 });
        bucketPut(authInfo, testPutBucketRequest, log, () => {
            objectPut(authInfo, testPutObjectRequest, undefined, log,
@ -571,9 +569,7 @@ describe('objectPut API', () => {
    });
    it('should forward a 502 to the client for 4xx != 408', () => {
        metadata.putObjectMD = (bucketName, objName, objVal, params, log, cb) => cb({ httpCode: 412 });
        bucketPut(authInfo, testPutBucketRequest, log, () => {
            objectPut(authInfo, testPutObjectRequest, undefined, log,
@ -589,13 +585,12 @@ describe('objectPut API with versioning', () => {
        cleanup();
    });
    const objData = ['foo0', 'foo1', 'foo2'].map(str => Buffer.from(str, 'utf8'));
    const testPutObjectRequests = objData.map(data => versioningTestUtils
        .createPutObjectRequest(bucketName, objectName, data));
    it('should delete latest version when creating new null version '
        + 'if latest version is null version', done => {
        async.series([
            callback => bucketPut(authInfo, testPutBucketRequest, log,
                callback),
@ -633,8 +628,7 @@ describe('objectPut API with versioning', () => {
    });
    describe('when null version is not the latest version', () => {
        const objData = ['foo0', 'foo1', 'foo2'].map(str => Buffer.from(str, 'utf8'));
        const testPutObjectRequests = objData.map(data => versioningTestUtils
            .createPutObjectRequest(bucketName, objectName, data));
        beforeEach(done => {
@ -662,23 +656,23 @@ describe('objectPut API with versioning', () => {
        });
        it('should still delete null version when creating new null version',
            done => {
                objectPut(authInfo, testPutObjectRequests[2], undefined,
                    log, err => {
                        assert.ifError(err, `Unexpected err: ${err}`);
                        setImmediate(() => {
                            // old null version should be deleted after putting
                            // new null version
                            versioningTestUtils.assertDataStoreValues(ds,
                                [undefined, objData[1], objData[2]]);
                            done(err);
                        });
                    });
            });
    });
    it('should return BadDigest error and not leave orphans in data when '
        + 'contentMD5 and completedHash do not match', done => {
        const testPutObjectRequest = new DummyRequest({
            bucketName,
            namespace,
@ -690,18 +684,18 @@ describe('objectPut API with versioning', () => {
        bucketPut(authInfo, testPutBucketRequest, log, () => {
            objectPut(authInfo, testPutObjectRequest, undefined, log,
                err => {
                    assert.deepStrictEqual(err, errors.BadDigest);
                    // orphan objects don't get deleted
                    // until the next tick
                    // in memory
                    setImmediate(() => {
                        // Data store starts at index 1
                        assert.strictEqual(ds[0], undefined);
                        assert.strictEqual(ds[1], undefined);
                        done();
                    });
                });
        });
    });
});
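
The two metadata-failure tests above replace `metadata.putObjectMD` with a stub that returns an HTTP code. Since the file already keeps `originalputObjectMD` around, the usual pattern is to restore it after each test, roughly as sketched here (illustrative only; the surrounding file may already do this):

    afterEach(() => {
        // put the real metadata backend function back so later tests
        // are not affected by the stubbed 408/412 responses
        metadata.putObjectMD = originalputObjectMD;
    });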
View File

@ -3,11 +3,12 @@ const { errors } = require('arsenal');
const { bucketPut } = require('../../../lib/api/bucketPut');
const constants = require('../../../constants');
const {
    cleanup,
    DummyRequestLogger,
    makeAuthInfo,
    AccessControlPolicy,
} = require('../helpers');
const metadata = require('../metadataswitch');
const objectPut = require('../../../lib/api/objectPut');
const objectPutACL = require('../../../lib/api/objectPutACL');
@ -17,8 +18,8 @@ const log = new DummyRequestLogger();
const canonicalID = 'accessKey1';
const authInfo = makeAuthInfo(canonicalID);
const ownerID = authInfo.getCanonicalID();
const anotherID = '79a59df900b949e55d96a1e698fba'
    + 'cedfd6e09d98eacf8f8d5218e7cd47ef2bf';
const defaultAcpParams = {
    ownerID,
    ownerDisplayName: 'OwnerDisplayName',
@ -56,6 +57,7 @@ describe('putObjectACL API', () => {
            headers: { 'x-amz-acl': 'invalid-option' },
            url: `/${bucketName}/${objectName}?acl`,
            query: { acl: '' },
            iamAuthzResults: false,
        };
        bucketPut(authInfo, testPutBucketRequest, log, () => {
@ -79,6 +81,7 @@ describe('putObjectACL API', () => {
            headers: { 'x-amz-acl': 'public-read-write' },
            url: `/${bucketName}/${objectName}?acl`,
            query: { acl: '' },
            iamAuthzResults: false,
        };
        bucketPut(authInfo, testPutBucketRequest, log, () => {
@ -88,12 +91,12 @@ describe('putObjectACL API', () => {
                objectPutACL(authInfo, testObjACLRequest, log, err => {
                    assert.strictEqual(err, null);
                    metadata.getObjectMD(bucketName, objectName, {},
                        log, (err, md) => {
                            assert.strictEqual(md.acl.Canned,
                                'public-read-write');
                            assert.strictEqual(md.originOp, 's3:ObjectAcl:Put');
                            done();
                        });
                });
            });
        });
@ -108,6 +111,7 @@ describe('putObjectACL API', () => {
            headers: { 'x-amz-acl': 'public-read' },
            url: `/${bucketName}/${objectName}?acl`,
            query: { acl: '' },
            iamAuthzResults: false,
        };
        const testObjACLRequest2 = {
@ -117,6 +121,7 describe('putObjectACL API', () => {
            headers: { 'x-amz-acl': 'authenticated-read' },
            url: `/${bucketName}/${objectName}?acl`,
            query: { acl: '' },
            iamAuthzResults: false,
        };
        bucketPut(authInfo, testPutBucketRequest, log, () => {
@ -126,22 +131,22 @@ describe('putObjectACL API', () => {
                objectPutACL(authInfo, testObjACLRequest1, log, err => {
                    assert.strictEqual(err, null);
                    metadata.getObjectMD(bucketName, objectName, {},
                        log, (err, md) => {
                            assert.strictEqual(md.acl.Canned,
                                'public-read');
                            objectPutACL(authInfo, testObjACLRequest2, log,
                                err => {
                                    assert.strictEqual(err, null);
                                    metadata.getObjectMD(bucketName,
                                        objectName, {}, log, (err, md) => {
                                            assert.strictEqual(md
                                                .acl.Canned,
                                                'authenticated-read');
                                            assert.strictEqual(md.originOp, 's3:ObjectAcl:Put');
                                            done();
                                        });
                                });
                        });
                });
            });
        });
@ -154,14 +159,15 @@ describe('putObjectACL API', () => {
            objectKey: objectName,
            headers: {
                'x-amz-grant-full-control':
                    'emailaddress="sampleaccount1@sampling.com"'
                    + ',emailaddress="sampleaccount2@sampling.com"',
                'x-amz-grant-read': `uri=${constants.logId}`,
                'x-amz-grant-read-acp': `id=${ownerID}`,
                'x-amz-grant-write-acp': `id=${anotherID}`,
            },
            url: `/${bucketName}/${objectName}?acl`,
            query: { acl: '' },
            iamAuthzResults: false,
        };
        bucketPut(authInfo, testPutBucketRequest, log, () => {
            objectPut(authInfo, testPutObjectRequest, undefined, log,
@ -191,19 +197,20 @@ describe('putObjectACL API', () => {
        });
    });
    it('should return an error if invalid email '
        + 'provided in ACL header request', done => {
        const testObjACLRequest = {
            bucketName,
            namespace,
            objectKey: objectName,
            headers: {
                'x-amz-grant-full-control':
                    'emailaddress="sampleaccount1@sampling.com"'
                    + ',emailaddress="nonexistentemail@sampling.com"',
            },
            url: `/${bucketName}/${objectName}?acl`,
            query: { acl: '' },
            iamAuthzResults: false,
        };
        bucketPut(authInfo, testPutBucketRequest, log, () => {
@ -234,6 +241,7 @@ describe('putObjectACL API', () => {
            url: `/${bucketName}/${objectName}?acl`,
            post: [Buffer.from(acp.getXml(), 'utf8')],
            query: { acl: '' },
            iamAuthzResults: false,
        };
        bucketPut(authInfo, testPutBucketRequest, log, () => {
@ -243,25 +251,25 @@ describe('putObjectACL API', () => {
                objectPutACL(authInfo, testObjACLRequest, log, err => {
                    assert.strictEqual(err, null);
                    metadata.getObjectMD(bucketName, objectName, {},
                        log, (err, md) => {
                            assert.strictEqual(md
                                .acl.FULL_CONTROL[0], ownerID);
                            assert.strictEqual(md
                                .acl.READ[0], constants.publicId);
                            assert.strictEqual(md
                                .acl.WRITE_ACP[0], ownerID);
                            assert.strictEqual(md
                                .acl.READ_ACP[0], anotherID);
                            assert.strictEqual(md.originOp, 's3:ObjectAcl:Put');
                            done();
                        });
                });
            });
        });
    });
    it('should return an error if wrong owner ID '
        + 'provided in ACLs set out in request body', done => {
        const acp = new AccessControlPolicy({ ownerID: anotherID });
        const testObjACLRequest = {
            bucketName,
@ -271,6 +279,7 @@ describe('putObjectACL API', () => {
            url: `/${bucketName}/${objectName}?acl`,
            post: [Buffer.from(acp.getXml(), 'utf8')],
            query: { acl: '' },
            iamAuthzResults: false,
        };
        bucketPut(authInfo, testPutBucketRequest, log, () => {
@ -285,8 +294,8 @@ describe('putObjectACL API', () => {
        });
    });
    it('should ignore if WRITE ACL permission is '
        + 'provided in request body', done => {
        const acp = new AccessControlPolicy(defaultAcpParams);
        acp.addGrantee('CanonicalUser', ownerID, 'FULL_CONTROL',
            'OwnerDisplayName');
@ -299,6 +308,7 @@ describe('putObjectACL API', () => {
            url: `/${bucketName}/${objectName}?acl`,
            post: [Buffer.from(acp.getXml(), 'utf8')],
            query: { acl: '' },
            iamAuthzResults: false,
        };
        bucketPut(authInfo, testPutBucketRequest, log, () => {
@ -308,25 +318,25 @@ describe('putObjectACL API', () => {
                objectPutACL(authInfo, testObjACLRequest, log, err => {
                    assert.strictEqual(err, null);
                    metadata.getObjectMD(bucketName, objectName, {},
                        log, (err, md) => {
                            assert.strictEqual(md.acl.Canned, '');
                            assert.strictEqual(md.acl.FULL_CONTROL[0],
                                ownerID);
                            assert.strictEqual(md.acl.WRITE, undefined);
                            assert.strictEqual(md.acl.READ[0], undefined);
                            assert.strictEqual(md.acl.WRITE_ACP[0],
                                undefined);
                            assert.strictEqual(md.acl.READ_ACP[0],
                                undefined);
                            done();
                        });
                });
            });
        });
    });
    it('should return an error if invalid email '
        + 'address provided in ACLs set out in request body', done => {
        const acp = new AccessControlPolicy(defaultAcpParams);
        acp.addGrantee('AmazonCustomerByEmail', 'xyz@amazon.com', 'WRITE_ACP');
        const testObjACLRequest = {
@ -337,6 +347,7 @@ describe('putObjectACL API', () => {
            url: `/${bucketName}/${objectName}?acl`,
            post: [Buffer.from(acp.getXml(), 'utf8')],
            query: { acl: '' },
            iamAuthzResults: false,
        };
@ -352,8 +363,8 @@ describe('putObjectACL API', () => {
        });
    });
    it('should return an error if xml provided does not match s3 '
        + 'scheme for setting ACLs', done => {
        const acp = new AccessControlPolicy(defaultAcpParams);
        acp.addGrantee('AmazonCustomerByEmail', 'xyz@amazon.com', 'WRITE_ACP');
        const originalXml = acp.getXml();
@ -366,6 +377,7 @@ describe('putObjectACL API', () => {
            url: `/${bucketName}/${objectName}?acl`,
            post: [Buffer.from(modifiedXml, 'utf8')],
            query: { acl: '' },
            iamAuthzResults: false,
        };
        bucketPut(authInfo, testPutBucketRequest, log, () => {
@ -394,6 +406,7 @@ describe('putObjectACL API', () => {
            url: `/${bucketName}/${objectName}?acl`,
            post: [Buffer.from(modifiedXml, 'utf8')],
            query: { acl: '' },
            iamAuthzResults: false,
        };
@ -409,11 +422,11 @@ describe('putObjectACL API', () => {
        });
    });
    it('should return an error if invalid group '
        + 'uri provided in ACLs set out in request body', done => {
        const acp = new AccessControlPolicy(defaultAcpParams);
        acp.addGrantee('Group', 'http://acs.amazonaws.com/groups/'
            + 'global/NOTAVALIDGROUP', 'WRITE_ACP');
        const testObjACLRequest = {
            bucketName,
            namespace,
@ -422,6 +435,7 @@ describe('putObjectACL API', () => {
            url: `/${bucketName}/${objectName}?acl`,
            post: [Buffer.from(acp.getXml(), 'utf8')],
            query: { acl: '' },
            iamAuthzResults: false,
        };
        bucketPut(authInfo, testPutBucketRequest, log, () => {
@ -436,8 +450,8 @@ describe('putObjectACL API', () => {
        });
    });
    it('should return an error if invalid group uri '
        + 'provided in ACL header request', done => {
        const testObjACLRequest = {
            bucketName,
            namespace,
@ -445,11 +459,12 @@ describe('putObjectACL API', () => {
            headers: {
                'host': 's3.amazonaws.com',
                'x-amz-grant-full-control':
                    'uri="http://acs.amazonaws.com/groups/'
                    + 'global/NOTAVALIDGROUP"',
            },
            url: `/${bucketName}/${objectName}?acl`,
            query: { acl: '' },
            iamAuthzResults: false,
        };
        bucketPut(authInfo, testPutBucketRequest, log, () => {

@ -19,6 +19,7 @@ const putBucketRequest = {
    bucketName,
    headers: { host: `${bucketName}.s3.amazonaws.com` },
    url: '/',
    iamAuthzResults: false,
};
const putObjectRequest = new DummyRequest({
@ -29,16 +30,17 @@ const putObjectRequest = new DummyRequest({
    url: `/${bucketName}/${objectName}`,
}, postBody);
const objectLegalHoldXml = status => '<LegalHold '
    + 'xmlns="http://s3.amazonaws.com/doc/2006-03-01/">'
    + `<Status>${status}</Status>`
    + '</LegalHold>';
const putLegalHoldReq = status => ({
    bucketName,
    objectKey: objectName,
    headers: { host: `${bucketName}.s3.amazonaws.com` },
    post: objectLegalHoldXml(status),
    iamAuthzResults: false,
});
describe('putObjectLegalHold API', () => {
@ -77,11 +79,11 @@ describe('putObjectLegalHold API', () => {
        objectPutLegalHold(authInfo, putLegalHoldReq('ON'), log, err => {
            assert.ifError(err);
            return metadata.getObjectMD(bucketName, objectName, {}, log,
                (err, objMD) => {
                    assert.ifError(err);
                    assert.strictEqual(objMD.legalHold, true);
                    return done();
                });
        });
    });
@ -89,11 +91,11 @@ describe('putObjectLegalHold API', () => {
        objectPutLegalHold(authInfo, putLegalHoldReq('OFF'), log, err => {
            assert.ifError(err);
            return metadata.getObjectMD(bucketName, objectName, {}, log,
                (err, objMD) => {
                    assert.ifError(err);
                    assert.strictEqual(objMD.legalHold, false);
                    return done();
                });
        });
    });
});

View File

@ -23,6 +23,7 @@ const bucketPutRequest = {
    bucketName,
    headers: { host: `${bucketName}.s3.amazonaws.com` },
    url: '/',
    iamAuthzResults: false,
};
const putObjectRequest = new DummyRequest({
@ -33,41 +34,42 @@ const putObjectRequest = new DummyRequest({
    url: `/${bucketName}/${objectName}`,
}, postBody);
const objectRetentionXmlGovernance = '<Retention '
    + 'xmlns="http://s3.amazonaws.com/doc/2006-03-01/">'
    + '<Mode>GOVERNANCE</Mode>'
    + `<RetainUntilDate>${expectedDate}</RetainUntilDate>`
    + '</Retention>';
const objectRetentionXmlCompliance = '<Retention '
    + 'xmlns="http://s3.amazonaws.com/doc/2006-03-01/">'
    + '<Mode>COMPLIANCE</Mode>'
    + `<RetainUntilDate>${expectedDate}</RetainUntilDate>`
    + '</Retention>';
const objectRetentionXmlGovernanceLonger = '<Retention '
    + 'xmlns="http://s3.amazonaws.com/doc/2006-03-01/">'
    + '<Mode>GOVERNANCE</Mode>'
    + `<RetainUntilDate>${moment().add(5, 'days').toISOString()}</RetainUntilDate>`
    + '</Retention>';
const objectRetentionXmlGovernanceShorter = '<Retention '
    + 'xmlns="http://s3.amazonaws.com/doc/2006-03-01/">'
    + '<Mode>GOVERNANCE</Mode>'
    + `<RetainUntilDate>${moment().add(1, 'days').toISOString()}</RetainUntilDate>`
    + '</Retention>';
const objectRetentionXmlComplianceShorter = '<Retention '
    + 'xmlns="http://s3.amazonaws.com/doc/2006-03-01/">'
    + '<Mode>COMPLIANCE</Mode>'
    + `<RetainUntilDate>${moment().add(1, 'days').toISOString()}</RetainUntilDate>`
    + '</Retention>';
const putObjRetRequestGovernance = {
    bucketName,
    objectKey: objectName,
    headers: { host: `${bucketName}.s3.amazonaws.com` },
    post: objectRetentionXmlGovernance,
    iamAuthzResults: false,
};
const putObjRetRequestGovernanceWithHeader = {
@ -78,6 +80,7 @@ const putObjRetRequestGovernanceWithHeader = {
        'x-amz-bypass-governance-retention': 'true',
    },
    post: objectRetentionXmlGovernance,
    iamAuthzResults: false,
};
const putObjRetRequestCompliance = {
@ -85,6 +88,7 @@ const putObjRetRequestCompliance = {
    objectKey: objectName,
    headers: { host: `${bucketName}.s3.amazonaws.com` },
    post: objectRetentionXmlCompliance,
    iamAuthzResults: false,
};
const putObjRetRequestComplianceShorter = {
@ -92,6 +96,7 @@ const putObjRetRequestComplianceShorter = {
    objectKey: objectName,
    headers: { host: `${bucketName}.s3.amazonaws.com` },
    post: objectRetentionXmlComplianceShorter,
    iamAuthzResults: false,
};
const putObjRetRequestGovernanceLonger = {
@ -99,6 +104,7 @@ const putObjRetRequestGovernanceLonger = {
    objectKey: objectName,
    headers: { host: `${bucketName}.s3.amazonaws.com` },
    post: objectRetentionXmlGovernanceLonger,
    iamAuthzResults: false,
};
const putObjRetRequestGovernanceShorter = {
@ -106,6 +112,7 @@ const putObjRetRequestGovernanceShorter = {
    objectKey: objectName,
    headers: { host: `${bucketName}.s3.amazonaws.com` },
    post: objectRetentionXmlGovernanceShorter,
    iamAuthzResults: false,
};
describe('putObjectRetention API', () => {
@ -144,12 +151,12 @@ describe('putObjectRetention API', () => {
        objectPutRetention(authInfo, putObjRetRequestGovernance, log, err => {
            assert.ifError(err);
            return metadata.getObjectMD(bucketName, objectName, {}, log,
                (err, objMD) => {
                    assert.ifError(err);
                    assert.strictEqual(objMD.retentionMode, expectedMode);
                    assert.strictEqual(objMD.retentionDate, expectedDate);
                    return done();
                });
        });
    });

View File

@ -3,16 +3,15 @@ const assert = require('assert');
const { bucketPut } = require('../../../lib/api/bucketPut');
const objectPut = require('../../../lib/api/objectPut');
const objectPutTagging = require('../../../lib/api/objectPutTagging');
const { _validator, parseTagXml } = require('arsenal').s3middleware.tagging;
const {
    cleanup,
    DummyRequestLogger,
    makeAuthInfo,
    TaggingConfigTester,
} = require('../helpers');
const metadata = require('../../../lib/metadata/wrapper');
const { taggingTests } = require('../../functional/aws-node-sdk/lib/utility/tagging.js');
const DummyRequest = require('../DummyRequest');
const log = new DummyRequestLogger();
@ -25,6 +24,7 @@ const testBucketPutRequest = {
    bucketName,
    headers: { host: `${bucketName}.s3.amazonaws.com` },
    url: '/',
    iamAuthzResults: false,
};
const testPutObjectRequest = new DummyRequest({
@ -42,14 +42,14 @@ function _checkError(err, code, errorName) {
}
function _generateSampleXml(key, value) {
    const xml = '<Tagging>'
        + '<TagSet>'
        + '<Tag>'
        + `<Key>${key}</Key>`
        + `<Value>${value}</Value>`
        + '</Tag>'
        + '</TagSet>'
        + '</Tagging>';
    return xml;
}
@ -62,7 +62,7 @@ describe('putObjectTagging API', () => {
                return done(err);
            }
            return objectPut(authInfo, testPutObjectRequest, undefined, log,
                done);
        });
    });
@ -78,16 +78,16 @@ describe('putObjectTagging API', () => {
                return done(err);
            }
            return metadata.getObjectMD(bucketName, objectName, {}, log,
                (err, objectMD) => {
                    if (err) {
                        process.stdout.write(`Err retrieving object MD ${err}`);
                        return done(err);
                    }
                    const uploadedTags = objectMD.tags;
                    assert.deepStrictEqual(uploadedTags, taggingUtil.getTags());
                    assert.strictEqual(objectMD.originOp, 's3:ObjectTagging:Put');
                    return done();
                });
        });
    });
});
@ -95,55 +95,101 @@ describe('putObjectTagging API', () => {
describe('PUT object tagging :: helper validation functions ', () => {
    describe('validateTagStructure ', () => {
        it('should return expected true if tag is valid false/undefined if not',
            done => {
                const tags = [
                    { tagTest: { Key: ['foo'], Value: ['bar'] }, isValid: true },
                    { tagTest: { Key: ['foo'] }, isValid: false },
                    { tagTest: { Value: ['bar'] }, isValid: false },
                    { tagTest: { Keys: ['foo'], Value: ['bar'] }, isValid: false },
                    {
                        tagTest: { Key: ['foo', 'boo'], Value: ['bar'] },
                        isValid: false,
                    },
                    {
                        tagTest: { Key: ['foo'], Value: ['bar', 'boo'] },
                        isValid: false,
                    },
                    {
                        tagTest: { Key: ['foo', 'boo'], Value: ['bar', 'boo'] },
                        isValid: false,
                    },
                    { tagTest: { Key: ['foo'], Values: ['bar'] }, isValid: false },
                    { tagTest: { Keys: ['foo'], Values: ['bar'] }, isValid: false },
                ];
                for (let i = 0; i < tags.length; i++) {
                    const tag = tags[i];
                    const result = _validator.validateTagStructure(tag.tagTest);
                    if (tag.isValid) {
                        assert(result);
                    } else {
                        assert(!result);
                    }
                }
                done();
            });
    });
    describe('validateXMLStructure ', () => {
        it('should return expected true if tag is valid false/undefined '
        + 'if not', done => {
            const tags = [
                {
                    tagging: { Tagging: { TagSet: [{ Tag: [] }] } },
                    isValid: true,
                },
                { tagging: { Tagging: { TagSet: [''] } }, isValid: true },
                { tagging: { Tagging: { TagSet: [] } }, isValid: false },
                { tagging: { Tagging: { TagSet: [{}] } }, isValid: false },
                {
                    tagging: { Tagging: { Tagset: [{ Tag: [] }] } },
                    isValid: false,
                },
                {
                    tagging: {
                        Tagging: { Tagset: [{ Tag: [] }] },
                        ExtraTagging: 'extratagging',
                    },
                    isValid: false,
                },
                {
                    tagging: {
                        Tagging: {
                            Tagset: [{ Tag: [] }],
                            ExtraTagset: 'extratagset',
                        },
                    },
                    isValid: false,
                },
                {
                    tagging: {
                        Tagging: {
                            Tagset: [{ Tag: [] }],
                            ExtraTagset: 'extratagset',
                        },
                    },
                    isValid: false,
                },
                {
                    tagging: {
                        Tagging: {
                            Tagset: [{ Tag: [], ExtraTag: 'extratag' }],
                        },
                    },
                    isValid: false,
                },
                {
                    tagging: { Tagging: { Tagset: [{ Tag: {} }] } },
                    isValid: false,
                },
            ];
            for (let i = 0; i < tags.length; i++) {
@ -172,8 +218,8 @@ describe('PUT object tagging :: helper validation functions ', () => {
    taggingTests.forEach(taggingTest => {
        it(taggingTest.it, done => {
            const { key } = taggingTest.tag;
            const { value } = taggingTest.tag;
            const xml = _generateSampleXml(key, value);
            parseTagXml(xml, log, (err, result) => {
                if (taggingTest.error) {

View File

@ -24,6 +24,7 @@ describe('serviceGet API', () => {
        parsedHost: 's3.amazonaws.com',
        headers: { host: 's3.amazonaws.com' },
        url: '/',
        iamAuthzResults: false,
    };
    it('should return the list of buckets owned by the user', done => {

View File

@ -0,0 +1,236 @@
const assert = require('assert');
const { checkBucketAcls, checkObjectAcls } = require('../../../lib/api/apiUtils/authorization/permissionChecks');
const constants = require('../../../constants');
const { bucketOwnerActions } = constants;
describe('checkBucketAcls', () => {
const mockBucket = {
getOwner: () => 'ownerId',
getAcl: () => ({
Canned: '',
FULL_CONTROL: [],
READ: [],
READ_ACP: [],
WRITE: [],
WRITE_ACP: [],
}),
};
const testScenarios = [
{
description: 'should return true if bucket owner matches canonicalID',
input: {
bucketAcl: {}, requestType: 'anyType', canonicalID: 'ownerId', mainApiCall: 'anyApiCall',
},
expected: true,
},
{
description: 'should return true for objectGetTagging when mainApiCall is objectGet',
input: {
bucketAcl: {}, requestType: 'objectGetTagging', canonicalID: 'anyId', mainApiCall: 'objectGet',
},
expected: true,
},
{
description: 'should return true for objectPutTagging when mainApiCall is objectPut',
input: {
bucketAcl: {}, requestType: 'objectPutTagging', canonicalID: 'anyId', mainApiCall: 'objectPut',
},
expected: true,
},
{
description: 'should return true for objectPutLegalHold when mainApiCall is objectPut',
input: {
bucketAcl: {}, requestType: 'objectPutLegalHold', canonicalID: 'anyId', mainApiCall: 'objectPut',
},
expected: true,
},
{
description: 'should return true for objectPutRetention when mainApiCall is objectPut',
input: {
bucketAcl: {}, requestType: 'objectPutRetention', canonicalID: 'anyId', mainApiCall: 'objectPut',
},
expected: true,
},
{
description: 'should return true for bucketGet if canned acl is public-read-write',
input: {
bucketAcl: { Canned: 'public-read-write' },
requestType: 'bucketGet',
canonicalID: 'anyId',
mainApiCall: 'anyApiCall',
},
expected: true,
},
{
description: 'should return true for bucketGet if canned acl is authenticated-read and id is not publicId',
input: {
bucketAcl: { Canned: 'authenticated-read' },
requestType: 'bucketGet',
canonicalID: 'anyIdNotPublic',
mainApiCall: 'anyApiCall',
},
expected: true,
},
{
description: 'should return true for bucketGet if canonicalID has FULL_CONTROL access',
input: {
bucketAcl: { FULL_CONTROL: ['anyId'], READ: [] },
requestType: 'bucketGet',
canonicalID: 'anyId',
mainApiCall: 'anyApiCall',
},
expected: true,
},
{
description: 'should return true for bucketGetACL if canonicalID has FULL_CONTROL',
input: {
bucketAcl: { FULL_CONTROL: ['anyId'], READ_ACP: [] },
requestType: 'bucketGetACL',
canonicalID: 'anyId',
mainApiCall: 'anyApiCall',
},
expected: true,
},
{
description: 'should return true for objectDelete if bucketAcl.Canned is public-read-write',
input: {
bucketAcl: { Canned: 'public-read-write' },
requestType: 'objectDelete',
canonicalID: 'anyId',
mainApiCall: 'anyApiCall',
},
expected: true,
},
{
description: 'should return true for requestType ending with "Version"',
input: {
bucketAcl: {},
requestType: 'objectGetVersion',
canonicalID: 'anyId',
mainApiCall: 'objectGet',
},
expected: true,
},
{
description: 'should return false for unmatched scenarios',
input: {
bucketAcl: {},
requestType: 'unmatchedRequest',
canonicalID: 'anyId',
mainApiCall: 'anyApiCall',
},
expected: false,
},
];
testScenarios.forEach(scenario => {
it(scenario.description, () => {
// Mock the bucket based on the test scenario's input
mockBucket.getAcl = () => scenario.input.bucketAcl;
const result = checkBucketAcls(mockBucket,
scenario.input.requestType, scenario.input.canonicalID, scenario.input.mainApiCall);
assert.strictEqual(result, scenario.expected);
});
});
});
describe('checkObjectAcls', () => {
const mockBucket = {
getOwner: () => 'bucketOwnerId',
getName: () => 'bucketName',
getAcl: () => ({ Canned: '' }),
};
const mockObjectMD = {
'owner-id': 'objectOwnerId',
'acl': {
Canned: '',
FULL_CONTROL: [],
READ: [],
READ_ACP: [],
WRITE: [],
WRITE_ACP: [],
},
};
it('should return true if request type is in bucketOwnerActions and bucket owner matches canonicalID', () => {
assert.strictEqual(checkObjectAcls(mockBucket, mockObjectMD, bucketOwnerActions[0],
'bucketOwnerId', false, false, 'anyApiCall'), true);
});
it('should return true if objectMD owner matches canonicalID', () => {
assert.strictEqual(checkObjectAcls(mockBucket, mockObjectMD, 'anyType',
'objectOwnerId', false, false, 'anyApiCall'), true);
});
it('should return true for objectGetTagging when mainApiCall is objectGet and conditions met', () => {
assert.strictEqual(checkObjectAcls(mockBucket, mockObjectMD, 'objectGetTagging',
'anyIdNotPublic', true, true, 'objectGet'), true);
});
it('should return false if no acl provided in objectMD', () => {
const objMDWithoutAcl = Object.assign({}, mockObjectMD);
delete objMDWithoutAcl.acl;
assert.strictEqual(checkObjectAcls(mockBucket, objMDWithoutAcl, 'anyType',
'anyId', false, false, 'anyApiCall'), false);
});
const tests = [
{
acl: 'public-read', reqType: 'objectGet', id: 'anyIdNotPublic', expected: true,
},
{
acl: 'public-read-write', reqType: 'objectGet', id: 'anyIdNotPublic', expected: true,
},
{
acl: 'authenticated-read', reqType: 'objectGet', id: 'anyIdNotPublic', expected: true,
},
{
acl: 'bucket-owner-read', reqType: 'objectGet', id: 'bucketOwnerId', expected: true,
},
{
acl: 'bucket-owner-full-control', reqType: 'objectGet', id: 'bucketOwnerId', expected: true,
},
{
aclList: ['someId', 'anyIdNotPublic'],
aclField: 'FULL_CONTROL',
reqType: 'objectGet',
id: 'anyIdNotPublic',
expected: true,
},
{
aclList: ['someId', 'anyIdNotPublic'],
aclField: 'READ',
reqType: 'objectGet',
id: 'anyIdNotPublic',
expected: true,
},
{ reqType: 'objectPut', id: 'anyId', expected: true },
{ reqType: 'objectDelete', id: 'anyId', expected: true },
{
aclList: ['anyId'], aclField: 'FULL_CONTROL', reqType: 'objectPutACL', id: 'anyId', expected: true,
},
{
acl: '', reqType: 'objectGet', id: 'randomId', expected: false,
},
];
tests.forEach(test => {
it(`should return ${test.expected} for ${test.reqType} with ACL as ${test.acl
|| (`${test.aclField}:${JSON.stringify(test.aclList)}`)}`, () => {
if (test.acl) {
mockObjectMD.acl.Canned = test.acl;
} else if (test.aclList && test.aclField) {
mockObjectMD.acl[test.aclField] = test.aclList;
}
assert.strictEqual(
checkObjectAcls(mockBucket, mockObjectMD, test.reqType, test.id, false, false, 'anyApiCall'),
test.expected,
);
});
});
});