Compare commits

...

2 Commits

Author SHA1 Message Date
williamlardier 345fa52c27 wip 2024-02-13 00:38:41 +01:00
williamlardier 9f1dd45671 wip 2024-02-13 00:13:26 +01:00
47 changed files with 174 additions and 82 deletions

View File

@@ -46,6 +46,7 @@
"new-parens": "off",
"no-multi-spaces": "off",
"quote-props": "off",
"no-console": "off",
"mocha/no-exclusive-tests": "error",
},
"parserOptions": {

View File

@@ -48,20 +48,32 @@ function isRequesterNonAccountUser(authInfo) {
* @returns {boolean} - Returns true if the user has the necessary access rights, otherwise false.
*/
const considerAsObjectPut = ['initiateMultipartUpload', 'objectPutPart', 'completeMultipartUpload'];
function checkBucketAcls(bucket, requestType, canonicalID, mainApiCall) {
// The same logic applies to the Versioned APIs, so let's simplify it.
const requestTypeParsed = requestType.endsWith('Version') ?
let requestTypeParsed = requestType.endsWith('Version') ?
requestType.slice(0, 'Version'.length * -1) : requestType;
requestTypeParsed = considerAsObjectPut.includes(requestTypeParsed) ? 'objectPut' : requestTypeParsed;
const parsedMainApiCall = considerAsObjectPut.includes(mainApiCall) ? 'objectPut' : mainApiCall;
console.log({
requestType,
requestTypeParsed,
parsedMainApiCall,
});
if (bucket.getOwner() === canonicalID) {
console.log('acl true because bucket owner');
return true;
}
if (mainApiCall === 'objectGet') {
if (parsedMainApiCall === 'objectGet') {
if (requestTypeParsed === 'objectGetTagging') {
console.log('acl true because objectGetTagging');
return true;
}
}
if (mainApiCall === 'objectPut') {
if (parsedMainApiCall === 'objectPut') {
if (arrayOfAllowed.includes(requestTypeParsed)) {
console.log('acl true because objectPut');
return true;
}
}
@@ -135,6 +147,7 @@ function checkBucketAcls(bucket, requestType, canonicalID, mainApiCall) {
// objectPutACL, objectGetACL, objectHead or objectGet, the bucket
// authorization check should just return true so can move on to check
// rights at the object level.
console.log('acls checks end');
return (requestTypeParsed === 'objectPutACL' || requestTypeParsed === 'objectGetACL'
|| requestTypeParsed === 'objectGet' || requestTypeParsed === 'objectHead');
}
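
For reference, the action normalization introduced in this hunk boils down to the standalone sketch below. The `considerAsObjectPut` list is taken verbatim from the diff; the helper name `normalizeRequestType` is purely illustrative, since the real logic is inlined in `checkBucketAcls`.

// Sketch only: mirrors the inlined normalization, not an exported helper.
const considerAsObjectPut = ['initiateMultipartUpload', 'objectPutPart', 'completeMultipartUpload'];
function normalizeRequestType(requestType) {
    // Versioned APIs reuse the rights of their base action: strip the 'Version' suffix.
    const base = requestType.endsWith('Version')
        ? requestType.slice(0, 'Version'.length * -1)
        : requestType;
    // MPU write actions are checked with the same bucket ACL rights as a plain object PUT.
    return considerAsObjectPut.includes(base) ? 'objectPut' : base;
}
// normalizeRequestType('objectPutPart')    -> 'objectPut'
// normalizeRequestType('objectGetVersion') -> 'objectGet'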
@@ -329,6 +342,16 @@ function checkBucketPolicy(policy, requestType, canonicalID, arn, bucketOwner, l
const actionMatch = _checkBucketPolicyActions(requestType, s.Action, log);
const resourceMatch = _checkBucketPolicyResources(request, s.Resource, log);
const conditionsMatch = _checkBucketPolicyConditions(request, s.Condition, log);
console.log('checkBucketPolicy', {
principalMatch,
actionMatch,
resourceMatch,
conditionsMatch,
canonicalID,
requestType,
arn,
s,
});
if (principalMatch && actionMatch && resourceMatch && conditionsMatch && s.Effect === 'Deny') {
// explicit deny trumps any allows, so return immediately
@@ -347,11 +370,15 @@ function processBucketPolicy(requestType, bucket, canonicalID, arn, bucketOwner,
const bucketPolicy = bucket.getBucketPolicy();
let processedResult = results[requestType];
if (!bucketPolicy) {
console.log('no bucket policy', { actionImplicitDenies }, actionImplicitDenies[requestType], { aclPermission });
// must also check that this is indeed an allow, not an explicit deny
processedResult = actionImplicitDenies[requestType] === false && aclPermission;
} else {
const bucketPolicyPermission = checkBucketPolicy(bucketPolicy, requestType, canonicalID, arn,
bucketOwner, log, request, actionImplicitDenies);
console.log('bk results!', bucketPolicyPermission, actionImplicitDenies[requestType]);
if (bucketPolicyPermission === 'explicitDeny') {
processedResult = false;
} else if (bucketPolicyPermission === 'allow') {
@@ -360,6 +387,15 @@ function processBucketPolicy(requestType, bucket, canonicalID, arn, bucketOwner,
processedResult = actionImplicitDenies[requestType] === false && aclPermission;
}
}
console.log('processBP result', {
requestType,
bucketPolicy,
canonicalID,
arn,
bucketOwner,
aclPermission,
processedResult,
});
return processedResult;
}
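
As a reading aid, the precedence applied by `processBucketPolicy` above can be summarized by this minimal sketch. The function name `resolveAccess` is made up; the 'allow' branch returning true reflects how the upstream helper behaves, although that exact line is cut off by the hunk boundary above.

// Combination rule: an explicit bucket-policy Deny always wins, an explicit Allow
// grants access, and otherwise access requires both no implicit IAM deny for the
// action and a positive ACL check.
function resolveAccess(policyVerdict, actionImplicitlyDenied, aclPermission) {
    if (policyVerdict === 'explicitDeny') {
        return false;
    }
    if (policyVerdict === 'allow') {
        return true;
    }
    // 'implicitDeny', or the bucket has no policy at all
    return actionImplicitlyDenied === false && aclPermission;
}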
@@ -385,14 +421,22 @@ function isBucketAuthorized(bucket, requestTypesInput, canonicalID, authInfo, lo
// if the bucket owner is an account, users should not have default access
if ((bucket.getOwner() === canonicalID) && requesterIsNotUser || isServiceAccount(canonicalID)) {
results[_requestType] = actionImplicitDenies[_requestType] === false;
console.log('result bucket authz case account or service account', results[_requestType]);
return results[_requestType];
}
const aclPermission = checkBucketAcls(bucket, _requestType, canonicalID, mainApiCall);
console.log('acl checks on bucket', {
aclPermission,
bucketOwner: bucket.getOwner(),
canonicalID,
requesterIsNotUser,
isServiceAccount: isServiceAccount(canonicalID),
});
// In case of error bucket access is checked with bucketGet
// For website, bucket policy only uses objectGet and ignores bucketGet
// https://docs.aws.amazon.com/AmazonS3/latest/userguide/WebsiteAccessPermissionsReqd.html
// bucketGet should be used to check acl but switched to objectGet for bucket policy
if (isWebsite && _requestType === 'bucketGet') {
if (isWebsite && (_requestType === 'bucketGet' || _requestType === 'listObjectVersions')) {
// eslint-disable-next-line no-param-reassign
_requestType = 'objectGet';
actionImplicitDenies.objectGet = actionImplicitDenies.objectGet || false;
@@ -432,18 +476,34 @@ function isObjAuthorized(bucket, objectMD, requestTypesInput, canonicalID, authI
actionImplicitDenies[_requestType] = actionImplicitDenies[_requestType] || false;
const parsedMethodName = _requestType.endsWith('Version')
? _requestType.slice(0, -7) : _requestType;
console.log('parsedMethodName', parsedMethodName);
const bucketOwner = bucket.getOwner();
if (!objectMD) {
// check bucket has read access
// 'bucketGet' covers listObjects and listMultipartUploads, bucket read actions
results[_requestType] = isBucketAuthorized(bucket, 'bucketGet', canonicalID, authInfo, log, request,
let permission = 'bucketGet';
if (considerAsObjectPut.includes(_requestType)) {
permission = 'objectPut';
}
results[_requestType] = isBucketAuthorized(bucket, permission, canonicalID, authInfo, log, request,
actionImplicitDenies, isWebsite);
// User is already authorized on the bucket for FULL_CONTROL or WRITE or
// bucket has canned ACL public-read-write
console.log('checking parsedMethodName', parsedMethodName, results[_requestType]);
if ((parsedMethodName === 'objectPut' || parsedMethodName === 'objectDelete')
&& results[_requestType] === false) {
results[_requestType] = actionImplicitDenies[_requestType] === false;
console.log('object authorized? case objectPut or objectDelete', {
requestType: _requestType,
results: results[_requestType],
});
}
// If the API is MPU related, we need to check if the user has
// permissions to the bucket
console.log('object authorized? case no objectmd', {
requestType: _requestType,
results: results[_requestType],
});
return results[_requestType];
}
let requesterIsNotUser = true;
@@ -456,6 +516,10 @@ function isObjAuthorized(bucket, objectMD, requestTypesInput, canonicalID, authI
}
if (objectMD['owner-id'] === canonicalID && requesterIsNotUser || isServiceAccount(canonicalID)) {
results[_requestType] = actionImplicitDenies[_requestType] === false;
console.log('object authorized? case account or service account', {
requestType: _requestType,
results: results[_requestType],
});
return results[_requestType];
}
// account is authorized if:
@@ -466,6 +530,10 @@ function isObjAuthorized(bucket, objectMD, requestTypesInput, canonicalID, authI
&& (bucketOwner === canonicalID)
&& requesterIsNotUser) {
results[_requestType] = actionImplicitDenies[_requestType] === false;
console.log('object authorized? case bucketOwnerActions', {
requestType: _requestType,
results: results[_requestType],
});
return results[_requestType];
}
const aclPermission = checkObjectAcls(bucket, objectMD, parsedMethodName,

View File

@@ -30,6 +30,9 @@ function bucketShield(bucket, requestType) {
// Otherwise return an error to the client
if ((bucket.hasDeletedFlag() || bucket.hasTransientFlag()) &&
(requestType !== 'objectPut' &&
requestType !== 'initiateMultipartUpload' &&
requestType !== 'objectPutPart' &&
requestType !== 'completeMultipartUpload' &&
requestType !== 'bucketPutACL' &&
requestType !== 'bucketDelete')) {
return true;

View File

@@ -14,7 +14,7 @@ function abortMultipartUpload(authInfo, bucketName, objectKey, uploadId, log,
bucketName,
objectKey,
uploadId,
preciseRequestType: 'multipartDelete',
preciseRequestType: request.apiMethods || 'multipartDelete',
request,
};
// For validating the request at the destinationBucket level
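
The one-line change above is the pattern repeated across most files in this compare: the metadata-validation parameters now prefer the action list already resolved on the request (`request.apiMethods`) and only fall back to the historical single action name (in some call sites the field is `preciseRequestType` rather than `requestType`). A hedged sketch of that pattern, using a hypothetical `buildMetadataValParams` helper that does not exist in the codebase:

// Hypothetical helper illustrating the `request.apiMethods || '<action>'` fallback.
function buildMetadataValParams(request, fallbackAction, extra = {}) {
    return {
        // Prefer the full list of API methods resolved by the routing layer,
        // fall back to the single legacy action name otherwise.
        requestType: request.apiMethods || fallbackAction,
        request,
        ...extra,
    };
}
// e.g. buildMetadataValParams(request, 'multipartDelete', { bucketName, objectKey, uploadId })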

View File

@@ -300,13 +300,16 @@ function checkUserGovernanceBypass(request, authInfo, bucketMD, objectKey, log,
signatureAge: authParams.params.data.signatureAge,
},
};
console.log('checking for s3:BypassGovernanceRetention permission');
return vault.checkPolicies(requestContextParams,
authInfo.getArn(), log, (err, authorizationResults) => {
console.log('==================================', err, authorizationResults, vault.checkPolicies);
if (err) {
return cb(err);
}
const explicitDenyExists = authorizationResults.some(
authzResult => authzResult.isAllowed === false && !authzResult.isImplicit);
console.log('++++++++++++=', explicitDenyExists);
if (explicitDenyExists) {
log.trace('authorization check failed for user',
{
@@ -333,6 +336,13 @@ function checkUserGovernanceBypass(request, authInfo, bucketMD, objectKey, log,
log,
request);
console.log('actionImplicitDenies', actionImplicitDenies, areAllActionsAllowed, {
bucketMD,
actionImplicitDeniesKeys: Object.keys(actionImplicitDenies),
authInfo,
actionImplicitDenies,
});
return cb(areAllActionsAllowed === true ? null : errors.AccessDenied);
});
}
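
To make the explicit-deny test above concrete, here is a small illustration with made-up result entries; the field names `isAllowed` and `isImplicit` come from the diff, but the sample values are not real Vault output.

// Made-up authorization results for illustration only.
const sampleResults = [
    { isAllowed: false, isImplicit: true },  // implicit deny: no statement matched
    { isAllowed: true, isImplicit: false },  // explicit allow
];
const explicitDenyExists = sampleResults.some(
    result => result.isAllowed === false && !result.isImplicit);
// explicitDenyExists === false here: the short-circuit above only fires on an
// explicit Deny; implicit results are re-evaluated further down before
// areAllActionsAllowed decides the final outcome.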

View File

@@ -58,7 +58,7 @@ function objectRestore(metadata, mdUtils, userInfo, request, log, callback) {
bucketName,
objectKey,
versionId: decodedVidResult,
requestType: 'restoreObject',
requestType: request.apiMethods || 'restoreObject',
};
return async.waterfall([

View File

@@ -31,14 +31,14 @@ function bucketDeleteCors(authInfo, request, log, callback) {
'deleteBucketCors');
return callback(err);
}
if (bucketShield(bucket, requestType)) {
if (bucketShield(bucket, request.apiMethods || requestType)) {
monitoring.promMetrics('DELETE', bucketName, 400,
'deleteBucketCors');
return callback(errors.NoSuchBucket);
}
log.trace('found bucket in metadata');
if (!isBucketAuthorized(bucket, requestType, canonicalID, authInfo, log, request,
if (!isBucketAuthorized(bucket, request.apiMethods || requestType, canonicalID, authInfo, log, request,
request.actionImplicitDenies)) {
log.debug('access denied for user on bucket', {
requestType,

View File

@@ -21,7 +21,7 @@ function bucketDeleteEncryption(authInfo, request, log, callback) {
const metadataValParams = {
authInfo,
bucketName,
requestType: 'bucketDeleteEncryption',
requestType: request.apiMethods || 'bucketDeleteEncryption',
request,
};

View File

@@ -18,7 +18,7 @@ function bucketDeleteLifecycle(authInfo, request, log, callback) {
const metadataValParams = {
authInfo,
bucketName,
requestType: 'bucketDeleteLifecycle',
requestType: request.apiMethods || 'bucketDeleteLifecycle',
request,
};
return standardMetadataValidateBucket(metadataValParams, request.actionImplicitDenies, log, (err, bucket) => {

View File

@@ -16,7 +16,7 @@ function bucketDeletePolicy(authInfo, request, log, callback) {
const metadataValParams = {
authInfo,
bucketName,
requestType: 'bucketDeletePolicy',
requestType: request.apiMethods || 'bucketDeletePolicy',
request,
};
return standardMetadataValidateBucket(metadataValParams, request.actionImplicitDenies, log, (err, bucket) => {

View File

@@ -18,7 +18,7 @@ function bucketDeleteReplication(authInfo, request, log, callback) {
const metadataValParams = {
authInfo,
bucketName,
requestType: 'bucketDeleteReplication',
requestType: request.apiMethods || 'bucketDeleteReplication',
request,
};
return standardMetadataValidateBucket(metadataValParams, request.actionImplicitDenies, log, (err, bucket) => {

View File

@@ -30,7 +30,7 @@ function bucketDeleteWebsite(authInfo, request, log, callback) {
}
log.trace('found bucket in metadata');
if (!isBucketAuthorized(bucket, requestType, canonicalID, authInfo, log, request,
if (!isBucketAuthorized(bucket, request.apiMethods || requestType, canonicalID, authInfo, log, request,
request.actionImplicitDenies)) {
log.debug('access denied for user on bucket', {
requestType,

View File

@@ -322,7 +322,7 @@ function bucketGet(authInfo, request, log, callback) {
const metadataValParams = {
authInfo,
bucketName,
requestType: 'bucketGet',
requestType: request.apiMethods || 'bucketGet',
request,
};
const listParams = {

View File

@@ -44,7 +44,7 @@ function bucketGetACL(authInfo, request, log, callback) {
const metadataValParams = {
authInfo,
bucketName,
requestType: 'bucketGetACL',
requestType: request.apiMethods || 'bucketGetACL',
request,
};
const grantInfo = {

View File

@@ -39,7 +39,7 @@ function bucketGetCors(authInfo, request, log, callback) {
const corsHeaders = collectCorsHeaders(request.headers.origin,
request.method, bucket);
if (!isBucketAuthorized(bucket, requestType, canonicalID, authInfo, log,
if (!isBucketAuthorized(bucket, request.apiMethods || requestType, canonicalID, authInfo, log,
request, request.actionImplicitDenies)) {
log.debug('access denied for user on bucket', {
requestType,

View File

@@ -22,7 +22,7 @@ function bucketGetEncryption(authInfo, request, log, callback) {
const metadataValParams = {
authInfo,
bucketName,
requestType: 'bucketGetEncryption',
requestType: request.apiMethods || 'bucketGetEncryption',
request,
};

View File

@@ -21,7 +21,7 @@ function bucketGetLifecycle(authInfo, request, log, callback) {
const metadataValParams = {
authInfo,
bucketName,
requestType: 'bucketGetLifecycle',
requestType: request.apiMethods || 'bucketGetLifecycle',
request,
};
return standardMetadataValidateBucket(metadataValParams, request.actionImplicitDenies, log, (err, bucket) => {

View File

@@ -41,7 +41,7 @@ function bucketGetLocation(authInfo, request, log, callback) {
const corsHeaders = collectCorsHeaders(request.headers.origin,
request.method, bucket);
if (!isBucketAuthorized(bucket, requestType, canonicalID, authInfo, log, request,
if (!isBucketAuthorized(bucket, request.apiMethods || requestType, canonicalID, authInfo, log, request,
request.actionImplicitDenies)) {
log.debug('access denied for account on bucket', {
requestType,

View File

@@ -37,7 +37,7 @@ function bucketGetNotification(authInfo, request, log, callback) {
const metadataValParams = {
authInfo,
bucketName,
requestType: 'bucketGetNotification',
requestType: request.apiMethods || 'bucketGetNotification',
request,
};

View File

@@ -33,7 +33,7 @@ function bucketGetObjectLock(authInfo, request, log, callback) {
const metadataValParams = {
authInfo,
bucketName,
requestType: 'bucketGetObjectLock',
requestType: request.apiMethods || 'bucketGetObjectLock',
request,
};
return standardMetadataValidateBucket(metadataValParams, request.actionImplicitDenies, log, (err, bucket) => {

View File

@@ -17,7 +17,7 @@ function bucketGetPolicy(authInfo, request, log, callback) {
const metadataValParams = {
authInfo,
bucketName,
requestType: 'bucketGetPolicy',
requestType: request.apiMethods || 'bucketGetPolicy',
request,
};

View File

@@ -21,7 +21,7 @@ function bucketGetReplication(authInfo, request, log, callback) {
const metadataValParams = {
authInfo,
bucketName,
requestType: 'bucketGetReplication',
requestType: request.apiMethods || 'bucketGetReplication',
request,
};
return standardMetadataValidateBucket(metadataValParams, request.actionImplicitDenies, log, (err, bucket) => {

View File

@@ -54,7 +54,7 @@ function bucketGetVersioning(authInfo, request, log, callback) {
const metadataValParams = {
authInfo,
bucketName,
requestType: 'bucketGetVersioning',
requestType: request.apiMethods || 'bucketGetVersioning',
request,
};

View File

@@ -39,7 +39,7 @@ function bucketGetWebsite(authInfo, request, log, callback) {
const corsHeaders = collectCorsHeaders(request.headers.origin,
request.method, bucket);
if (!isBucketAuthorized(bucket, requestType, canonicalID, authInfo, log,
if (!isBucketAuthorized(bucket, request.apiMethods || requestType, canonicalID, authInfo, log,
request, request.actionImplicitDenies)) {
log.debug('access denied for user on bucket', {
requestType,

View File

@@ -19,7 +19,7 @@ function bucketHead(authInfo, request, log, callback) {
const metadataValParams = {
authInfo,
bucketName,
requestType: 'bucketHead',
requestType: request.apiMethods || 'bucketHead',
request,
};
standardMetadataValidateBucket(metadataValParams, request.actionImplicitDenies, log, (err, bucket) => {

View File

@@ -26,7 +26,7 @@ function bucketPutObjectLock(authInfo, request, log, callback) {
const metadataValParams = {
authInfo,
bucketName,
requestType: 'bucketPutObjectLock',
requestType: request.apiMethods || 'bucketPutObjectLock',
request,
};
return waterfall([

View File

@@ -133,7 +133,7 @@ function completeMultipartUpload(authInfo, request, log, callback) {
bucketName,
// Required permissions for this action
// at the destinationBucket level are same as objectPut
requestType: 'objectPut',
requestType: request.apiMethods || 'completeMultipartUpload',
versionId,
request,
};

View File

@@ -105,7 +105,7 @@ function initiateMultipartUpload(authInfo, request, log, callback) {
authInfo,
bucketName,
// Required permissions for this action are same as objectPut
requestType: 'objectPut',
requestType: request.apiMethods || 'initiateMultipartUpload',
request,
};
const accountCanonicalID = authInfo.getCanonicalID();

View File

@@ -96,8 +96,8 @@ function listMultipartUploads(authInfo, request, log, callback) {
// to list the multipart uploads so we have provided here that
// the authorization to list multipart uploads is the same
// as listing objects in a bucket.
requestType: 'bucketGet',
preciseRequestType: 'listMultipartUploads',
requestType: request.apiMethods || 'bucketGet',
preciseRequestType: request.apiMethods || 'listMultipartUploads',
request,
};

View File

@@ -97,7 +97,7 @@ function listParts(authInfo, request, log, callback) {
bucketName,
objectKey,
uploadId,
preciseRequestType: 'listParts',
preciseRequestType: request.apiMethods || 'listParts',
request,
};
// For validating the request at the destinationBucket level

View File

@@ -71,7 +71,7 @@ function metadataSearch(authInfo, request, log, callback) {
const metadataValParams = {
authInfo,
bucketName,
requestType: 'metadataSearch',
requestType: request.apiMethods || 'metadataSearch',
request,
};
const listParams = {

View File

@@ -56,7 +56,7 @@ function objectDeleteInternal(authInfo, request, log, isExpiration, cb) {
bucketName,
objectKey,
versionId: reqVersionId,
requestType: 'objectDelete',
requestType: request.apiMethods || 'objectDelete',
request,
};

View File

@@ -44,7 +44,7 @@ function objectDeleteTagging(authInfo, request, log, callback) {
objectKey,
versionId: reqVersionId,
getDeleteMarker: true,
requestType: 'objectDeleteTagging',
requestType: request.apiMethods || 'objectDeleteTagging',
request,
};

View File

@@ -66,7 +66,7 @@ function objectGet(authInfo, request, returnTagCount, log, callback) {
objectKey,
versionId,
getDeleteMarker: true,
requestType: 'objectGet',
requestType: request.apiMethods || 'objectGet',
request,
};

View File

@@ -61,7 +61,7 @@ function objectGetACL(authInfo, request, log, callback) {
bucketName,
objectKey,
versionId,
requestType: 'objectGetACL',
requestType: request.apiMethods || 'objectGetACL',
request,
};
const grantInfo = {

View File

@@ -40,7 +40,7 @@ function objectGetLegalHold(authInfo, request, log, callback) {
bucketName,
objectKey,
versionId,
requestType: 'objectGetLegalHold',
requestType: request.apiMethods || 'objectGetLegalHold',
request,
};

View File

@@ -40,7 +40,7 @@ function objectGetRetention(authInfo, request, log, callback) {
bucketName,
objectKey,
versionId: reqVersionId,
requestType: 'objectGetRetention',
requestType: request.apiMethods || 'objectGetRetention',
request,
};

View File

@@ -41,7 +41,7 @@ function objectGetTagging(authInfo, request, log, callback) {
bucketName,
objectKey,
versionId: reqVersionId,
requestType: 'objectGetTagging',
requestType: request.apiMethods || 'objectGetTagging',
request,
};

View File

@@ -48,7 +48,7 @@ function objectHead(authInfo, request, log, callback) {
objectKey,
versionId,
getDeleteMarker: true,
requestType: 'objectHead',
requestType: request.apiMethods || 'objectHead',
request,
};

View File

@@ -68,7 +68,7 @@ function objectPutCopyPart(authInfo, request, sourceBucket,
authInfo,
bucketName: destBucketName,
objectKey: destObjectKey,
requestType: 'objectPut',
requestType: 'objectPutPart',
request,
};

View File

@@ -125,8 +125,8 @@ function objectPutPart(authInfo, request, streamingV4Params, log,
(destinationBucket, next) => {
// For validating the request at the destinationBucket level the
// `requestType` is the general 'objectPut'.
const requestType = 'objectPut';
if (!isBucketAuthorized(destinationBucket, request.apiMethods || requestType, canonicalID, authInfo,
const requestType = request.apiMethods || 'objectPutPart';
if (!isBucketAuthorized(destinationBucket, requestType, canonicalID, authInfo,
log, request, request.actionImplicitDenies)) {
log.debug('access denied for user on bucket', { requestType });
return next(errors.AccessDenied, destinationBucket);

View File

@@ -189,6 +189,8 @@ function standardMetadataValidateBucketAndObj(params, actionImplicitDenies, log,
if (!Array.isArray(requestType)) {
requestType = [requestType];
}
console.log('entering standardMetadataValidateBucketAndObj',
{ authInfo, bucketName, objectKey, versionId, getDeleteMarker, requestType });
async.waterfall([
next => {
// versionId may be 'null', which asks metadata to fetch the null key specifically
@@ -197,6 +199,7 @@ function standardMetadataValidateBucketAndObj(params, actionImplicitDenies, log,
getOptions.getDeleteMarker = true;
}
return metadata.getBucketAndObjectMD(bucketName, objectKey, getOptions, log, (err, getResult) => {
console.log('metadata.getBucketAndObjectMD', { err, getResult });
if (err) {
// if some implicit iamAuthzResults, return AccessDenied
// before leaking any state information
@@ -219,6 +222,7 @@ function standardMetadataValidateBucketAndObj(params, actionImplicitDenies, log,
return next(errors.NoSuchBucket);
}
const validationError = validateBucket(bucket, params, log, actionImplicitDenies);
console.log('validateBucket', { validationError, bucket });
if (validationError) {
return next(validationError, bucket);
}
@@ -236,9 +240,11 @@ function standardMetadataValidateBucketAndObj(params, actionImplicitDenies, log,
log.debug('access denied for user on object', { requestType });
return next(errors.AccessDenied, bucket);
}
console.log('object authorize', { bucket, objMD });
return next(null, bucket, objMD);
},
], (err, bucket, objMD) => {
console.log('end of bucket checks', { err, bucket, objMD });
if (err) {
// still return bucket for cors headers
return callback(err, bucket);
@@ -261,6 +267,7 @@ function standardMetadataValidateBucketAndObj(params, actionImplicitDenies, log,
function standardMetadataValidateBucket(params, actionImplicitDenies, log, callback) {
const { bucketName } = params;
return metadata.getBucket(bucketName, log, (err, bucket) => {
console.log('.getBucket', { err, bucket });
if (err) {
// if some implicit actionImplicitDenies, return AccessDenied before
// leaking any state information

View File

@@ -1481,7 +1481,7 @@ function routeBackbeat(clientIP, request, response, log) {
objectKey: request.objectKey,
authInfo: userInfo,
versionId,
requestType: 'ReplicateObject',
requestType: request.apiMethods || 'ReplicateObject',
request,
};
return standardMetadataValidateBucketAndObj(mdValParams, request.actionImplicitDenies, log, next);

View File

@@ -43,7 +43,7 @@ httpRequests = Stat(
noValue="0",
reduceCalc="mean",
targets=[Target(
expr='sum(rate(s3_cloudserver_http_requests_total{namespace="${namespace}", job=~"$job"}[$__rate_interval]))', # noqa: E501
expr='sum(rate(http_requests_total{namespace="${namespace}", job=~"$job"}[$__rate_interval]))', # noqa: E501
)],
thresholds=[
Threshold("green", 0, 0.0),
@@ -60,9 +60,9 @@ successRate = GaugePanel(
noValue="-",
targets=[Target(
expr="\n".join([
'sum(rate(s3_cloudserver_http_requests_total{namespace="${namespace}", job=~"$job", code=~"2.."}[$__rate_interval])) * 100', # noqa: E501
'sum(rate(http_requests_total{namespace="${namespace}", job=~"$job", code=~"2.."}[$__rate_interval])) * 100', # noqa: E501
" /",
'sum(rate(s3_cloudserver_http_requests_total{namespace="${namespace}", job=~"$job"}[$__rate_interval]) > 0)', # noqa: E501
'sum(rate(http_requests_total{namespace="${namespace}", job=~"$job"}[$__rate_interval]) > 0)', # noqa: E501
]),
legendFormat="Success rate",
)],
@@ -86,7 +86,7 @@ dataIngestionRate = Stat(
format="binBps",
reduceCalc="mean",
targets=[Target(
expr='-sum(deriv(s3_cloudserver_disk_available_bytes{namespace="${namespace}", job=~"$job"}[$__rate_interval]))', # noqa: E501
expr='-sum(deriv(cloud_server_data_disk_available{namespace="${namespace}", job=~"$job"}[$__rate_interval]))', # noqa: E501
)],
thresholds=[
Threshold("dark-purple", 0, 0.0),
@@ -105,7 +105,7 @@ objectIngestionRate = Stat(
format="O/s",
reduceCalc="mean",
targets=[Target(
expr='sum(deriv(s3_cloudserver_objects_count{namespace="${namespace}", job=~"$job"}[$__rate_interval]))', # noqa: E501
expr='sum(deriv(cloud_server_number_of_objects{namespace="${namespace}", job=~"$job"}[$__rate_interval]))', # noqa: E501
)],
thresholds=[
Threshold("dark-purple", 0, 0.0),
@@ -125,7 +125,7 @@ bucketsCounter = Stat(
noValue="-",
reduceCalc="lastNotNull",
targets=[Target(
expr='sum(s3_cloudserver_buckets_count{namespace="${namespace}", job="${reportJob}"})', # noqa: E501
expr='sum(cloud_server_number_of_buckets{namespace="${namespace}", job="${reportJob}"})', # noqa: E501
)],
thresholds=[
Threshold("#808080", 0, 0.0),
@@ -146,7 +146,7 @@ objectsCounter = Stat(
noValue="-",
reduceCalc="lastNotNull",
targets=[Target(
expr='sum(s3_cloudserver_objects_count{namespace="${namespace}", job="${reportJob}"})', # noqa: E501
expr='sum(cloud_server_number_of_objects{namespace="${namespace}", job="${reportJob}"})', # noqa: E501
)],
thresholds=[
Threshold("#808080", 0, 0.0),
@@ -181,8 +181,8 @@ lastReport = Stat(
targets=[Target(
expr="\n".join([
'time()',
'- max(s3_cloudserver_last_report_timestamp{namespace="${namespace}", job="${reportJob}"})', # noqa: E501
'+ (max(s3_cloudserver_last_report_timestamp{namespace="${namespace}", job="${reportJob}"})', # noqa: E501
'- max(cloud_server_last_report_timestamp{namespace="${namespace}", job="${reportJob}"})', # noqa: E501
'+ (max(cloud_server_last_report_timestamp{namespace="${namespace}", job="${reportJob}"})', # noqa: E501
' - max(kube_cronjob_status_last_schedule_time{namespace="${namespace}", cronjob="${countItemsJob}"})', # noqa: E501
' > 0 or vector(0))',
])
@@ -207,7 +207,7 @@ def http_status_panel(title, code):
noValue="0",
reduceCalc="mean",
targets=[Target(
expr='sum(rate(s3_cloudserver_http_requests_total{namespace="${namespace}", job=~"$job",code=' + code + "}[$__rate_interval]))", # noqa: E501
expr='sum(rate(http_requests_total{namespace="${namespace}", job=~"$job",code=' + code + "}[$__rate_interval]))", # noqa: E501
)],
thresholds=[Threshold("semi-dark-blue", 0, 0.)],
)
@@ -222,7 +222,7 @@ activeRequests = Stat(
dataSource="${DS_PROMETHEUS}",
reduceCalc="lastNotNull",
targets=[Target(
expr='sum(s3_cloudserver_http_active_requests{namespace="${namespace}", job=~"$job"})', # noqa: E501
expr='sum(http_active_requests{namespace="${namespace}", job=~"$job"})', # noqa: E501
)],
thresholds=[
Threshold("green", 0, 0.0),
@@ -242,7 +242,7 @@ oobDataIngestionRate = Stat(
format="binBps",
reduceCalc="mean",
targets=[Target(
expr='sum(deriv(s3_cloudserver_ingested_bytes{namespace="${namespace}", job=~"$job"}[$__rate_interval]))', # noqa: E501
expr='sum(deriv(cloud_server_data_ingested{namespace="${namespace}", job=~"$job"}[$__rate_interval]))', # noqa: E501
)],
thresholds=[
Threshold("purple", 0, 0.0),
@@ -261,7 +261,7 @@ oobObjectIngestionRate = Stat(
format="O/s",
reduceCalc="mean",
targets=[Target(
expr='sum(deriv(s3_cloudserver_ingested_objects_count{namespace="${namespace}", job=~"$job"}[$__rate_interval]))', # noqa: E501
expr='sum(deriv(cloud_server_number_of_ingested_objects{namespace="${namespace}", job=~"$job"}[$__rate_interval]))', # noqa: E501
)],
thresholds=[
Threshold("purple", 0, 0.0),
@@ -275,7 +275,7 @@ httpStatusCodes = TimeSeries(
lineInterpolation="smooth",
unit=UNITS.OPS_PER_SEC,
targets=[Target(
expr='sum by (code) (rate(s3_cloudserver_http_requests_total{namespace="${namespace}", job=~"$job"}[$__rate_interval]))', # noqa: E501
expr='sum by (code) (rate(http_requests_total{namespace="${namespace}", job=~"$job"}[$__rate_interval]))', # noqa: E501
legendFormat="{{code}}",
)],
)
@@ -284,7 +284,7 @@ httpStatusCodes = TimeSeries(
def http_aggregated_request_target(title, code):
# type: (str, str) -> Target
return Target(
expr='sum(rate(s3_cloudserver_http_requests_total{namespace="${namespace}", job=~"$job", code=' + code + "}[$__rate_interval]))", # noqa: E501
expr='sum(rate(http_requests_total{namespace="${namespace}", job=~"$job", code=' + code + "}[$__rate_interval]))", # noqa: E501
legendFormat=title,
)
@@ -331,7 +331,7 @@ requestsByAction = TimeSeries(
unit=UNITS.OPS_PER_SEC,
targets=[
Target(
expr='sum(rate(s3_cloudserver_http_requests_total{namespace="${namespace}", job=~"$job"}[$__rate_interval])) by(action)', # noqa: E501
expr='sum(rate(http_requests_total{namespace="${namespace}", job=~"$job"}[$__rate_interval])) by(action)', # noqa: E501
legendFormat="{{action}}",
)
]
@@ -345,7 +345,7 @@ requestsByMethod = PieChart(
unit=UNITS.SHORT,
targets=[
Target(
expr='sum(round(increase(s3_cloudserver_http_requests_total{namespace="${namespace}", job=~"$job"}[$__rate_interval]))) by(method)', # noqa: E501
expr='sum(round(increase(http_requests_total{namespace="${namespace}", job=~"$job"}[$__rate_interval]))) by(method)', # noqa: E501
legendFormat="{{method}}",
),
],
@@ -358,9 +358,9 @@ def average_latency_target(title, action="", by=""):
by = " by (" + by + ")" if by else ""
return Target(
expr="\n".join([
'sum(rate(s3_cloudserver_http_request_duration_seconds_sum{namespace="${namespace}", job=~"$job"' + extra + "}[$__rate_interval]))" + by, # noqa: E501
'sum(rate(http_request_duration_seconds_sum{namespace="${namespace}", job=~"$job"' + extra + "}[$__rate_interval]))" + by, # noqa: E501
" /",
'sum(rate(s3_cloudserver_http_request_duration_seconds_count{namespace="${namespace}", job=~"$job"' + extra + "}[$__rate_interval]))" + by, # noqa: E501,
'sum(rate(http_request_duration_seconds_count{namespace="${namespace}", job=~"$job"' + extra + "}[$__rate_interval]))" + by, # noqa: E501,
]),
legendFormat=title,
)
@@ -401,7 +401,7 @@ latenciesByAction = TimeSeries(
)
requestTime = Heatmap(
title="Request duration",
title="Request time",
dataSource="${DS_PROMETHEUS}",
dataFormat="tsbuckets",
maxDataPoints=25,
@@ -409,7 +409,7 @@ requestTime = Heatmap(
yAxis=YAxis(format=UNITS.DURATION_SECONDS),
color=HeatmapColor(mode="opacity"),
targets=[Target(
expr='sum by(le) (increase(s3_cloudserver_http_request_duration_seconds_bucket{namespace="${namespace}", job=~"$job"}[$__rate_interval]))', # noqa: E501
expr='sum by(le) (increase(http_request_duration_seconds_bucket{namespace="${namespace}", job=~"$job"}[$__interval]))', # noqa: E501
format="heatmap",
legendFormat="{{ le }}",
)],
@@ -433,11 +433,11 @@ bandWidth = TimeSeries(
unit="binBps",
targets=[
Target(
expr='sum(rate(s3_cloudserver_http_response_size_bytes_sum{namespace="${namespace}", job=~"$job"}[$__rate_interval]))', # noqa: E501
expr='sum(rate(http_response_size_bytes_sum{namespace="${namespace}", job=~"$job"}[$__rate_interval]))', # noqa: E501
legendFormat="Out"
),
Target(
expr='sum(rate(s3_cloudserver_http_request_size_bytes_sum{namespace="${namespace}", job=~"$job"}[$__rate_interval]))', # noqa: E501
expr='sum(rate(http_request_size_bytes_sum{namespace="${namespace}", job=~"$job"}[$__rate_interval]))', # noqa: E501
legendFormat="In"
)
],
@@ -461,7 +461,7 @@ uploadChunkSize = BarGauge(
noValue="-",
orientation="vertical",
targets=[Target(
expr='avg(s3_cloudserver_http_request_size_bytes{namespace="${namespace}", job=~"$job"}) by (quantile)', # noqa: E501
expr='avg(http_request_size_bytes{namespace="${namespace}", job=~"$job"}) by (quantile)', # noqa: E501
legendFormat='{{ quantile }}',
)],
thresholds=[
@@ -481,7 +481,7 @@ downloadChunkSize = BarGauge(
noValue="-",
orientation="vertical",
targets=[Target(
expr='avg(s3_cloudserver_http_response_size_bytes{namespace="${namespace}", job=~"$job"}) by (quantile)', # noqa: E501
expr='avg(http_response_size_bytes{namespace="${namespace}", job=~"$job"}) by (quantile)', # noqa: E501
legendFormat='{{ quantile }}',
)],
thresholds=[
@@ -585,7 +585,7 @@ dashboard = (
label='Group',
multi=True,
name='job',
query='label_values(s3_cloudserver_http_active_requests{namespace="${namespace}", container="${container}"}, job)', # noqa: E501
query='label_values(http_active_requests{namespace="${namespace}", container="${container}"}, job)', # noqa: E501
regex='/(?<value>${zenkoName}-(?<text>\\w*).*)/',
),
Template(
@@ -593,7 +593,7 @@ dashboard = (
hide=HIDE_VARIABLE,
label='pod',
name='pod',
query='label_values(s3_cloudserver_http_active_requests{namespace="${namespace}", container="${container}", job=~"$job"}, pod)', # noqa: E501
query='label_values(http_active_requests{namespace="${namespace}", container="${container}", job=~"$job"}, pod)', # noqa: E501
)
]),
panels=layout.column([

View File

@@ -21,7 +21,7 @@
"dependencies": {
"@azure/storage-blob": "^12.12.0",
"@hapi/joi": "^17.1.0",
"arsenal": "git+https://github.com/scality/arsenal#8.1.121",
"arsenal": "git+https://github.com/scality/arsenal#e265d0b80ee27b13c4012a90777b0c77399db1fb",
"async": "~2.5.0",
"aws-sdk": "2.905.0",
"bucketclient": "scality/bucketclient#8.1.9",

View File

@@ -117,6 +117,7 @@ describe('deleted flag bucket handling', () => {
bucketMD.setLocationConstraint(locationConstraint);
metadata.createBucket(bucketName, bucketMD, log, () => {
metadata.createBucket(usersBucketName, usersBucket, log, () => {
console.log('bucket created!!!!');
done();
});
});
@@ -439,12 +440,14 @@ describe('deleted flag bucket handling', () => {
if (extraArgNeeded) {
return apiAction(authInfo, mpuRequest, undefined,
log, err => {
console.log(err);
assert.strictEqual(err.is.NoSuchUpload, true);
return done();
});
}
return apiAction(authInfo, mpuRequest,
log, err => {
console.log(err);
assert.strictEqual(err.is.NoSuchUpload, true);
return done();
});

View File

@@ -789,9 +789,9 @@ arraybuffer.slice@~0.0.7:
optionalDependencies:
ioctl "^2.0.2"
"arsenal@git+https://github.com/scality/arsenal#8.1.121":
"arsenal@git+https://github.com/scality/arsenal#e265d0b80ee27b13c4012a90777b0c77399db1fb":
version "8.1.121"
resolved "git+https://github.com/scality/arsenal#5a5ef7c572a6546021e609b752e5644c166ad8a1"
resolved "git+https://github.com/scality/arsenal#e265d0b80ee27b13c4012a90777b0c77399db1fb"
dependencies:
"@azure/identity" "^3.1.1"
"@azure/storage-blob" "^12.12.0"