Compare commits
3 commits: developmen ... list-lifec

Author | SHA1 | Date
---|---|---
Nicolas Humbert | 41af186cf7 |
Nicolas Humbert | 7c049b1329 |
Nicolas Humbert | 371648ec0a |
@@ -36,6 +36,12 @@
        }, {
            "site": "us-east-2",
            "type": "aws_s3"
        }, {
            "site": "aws-location",
            "type": "aws_s3"
        }, {
            "site": "location-dmf-v1",
            "type": "dmf"
        }],
    "backbeat": {
        "host": "localhost",
@@ -0,0 +1,148 @@
const { versioning } = require('arsenal');
const versionIdUtils = versioning.VersionID;

const CURRENT_TYPE = 'current';
const NON_CURRENT_TYPE = 'noncurrent';
const ORPHAN_TYPE = 'orphan';

function _makeTags(tags) {
    const res = [];
    Object.entries(tags).forEach(([key, value]) =>
        res.push(
            {
                Key: key,
                Value: value,
            }
        ));
    return res;
}

function processCurrents(bucketName, listParams, list) {
    const data = {
        Name: bucketName,
        Prefix: listParams.prefix,
        MaxKeys: listParams.maxKeys,
        IsTruncated: !!list.IsTruncated,
        KeyMarker: listParams.marker,
        BeforeDate: listParams.beforeDate,
        NextKeyMarker: list.NextKeyMarker,
        Contents: [],
    };

    list.Contents.forEach(item => {
        const v = item.value;

        const content = {
            Key: item.key,
            LastModified: v.LastModified,
            Etag: v.ETag,
            Size: v.Size,
            Owner: {
                ID: v.Owner.ID,
                DisplayName: v.Owner.DisplayName
            },
            StorageClass: v.StorageClass,
            TagSet: _makeTags(v.tags),
            IsLatest: true, // for compatibility
            DataStoreName: v.dataStoreName,
            ListType: CURRENT_TYPE,
        };
        data.Contents.push(content);
    });

    return data;
}

function processNonCurrents(bucketName, listParams, list) {
    let nextVersionIdMarker = list.NextVersionIdMarker;
    if (nextVersionIdMarker && nextVersionIdMarker !== 'null') {
        nextVersionIdMarker = versionIdUtils.encode(nextVersionIdMarker);
    }

    let versionIdMarker = listParams.versionIdMarker;
    if (versionIdMarker && versionIdMarker !== 'null') {
        versionIdMarker = versionIdUtils.encode(versionIdMarker);
    }

    const data = {
        Name: bucketName,
        Prefix: listParams.prefix,
        MaxKeys: listParams.maxKeys,
        IsTruncated: !!list.IsTruncated,
        KeyMarker: listParams.keyMarker,
        VersionIdMarker: versionIdMarker,
        BeforeDate: listParams.beforeDate,
        NextKeyMarker: list.NextKeyMarker,
        NextVersionIdMarker: nextVersionIdMarker,
        Contents: [],
    };

    list.Contents.forEach(item => {
        const v = item.value;
        const versionId = (v.IsNull || v.VersionId === undefined) ?
            'null' : versionIdUtils.encode(v.VersionId);

        const content = {
            Key: item.key,
            LastModified: v.LastModified,
            Etag: v.ETag,
            Size: v.Size,
            Owner: {
                ID: v.Owner.ID,
                DisplayName: v.Owner.DisplayName
            },
            StorageClass: v.StorageClass,
            TagSet: _makeTags(v.tags),
            staleDate: v.staleDate, // lowerCamelCase to be compatible with existing lifecycle.
            VersionId: versionId,
            DataStoreName: v.dataStoreName,
            ListType: NON_CURRENT_TYPE,
        };

        data.Contents.push(content);
    });

    return data;
}

function processOrphans(bucketName, listParams, list) {
    const data = {
        Name: bucketName,
        Prefix: listParams.prefix,
        MaxKeys: listParams.maxKeys,
        IsTruncated: !!list.IsTruncated,
        KeyMarker: listParams.keyMarker,
        BeforeDate: listParams.beforeDate,
        NextKeyMarker: list.NextKeyMarker,
        Contents: [],
    };

    list.Contents.forEach(item => {
        const v = item.value;
        const versionId = (v.IsNull || v.VersionId === undefined) ?
            'null' : versionIdUtils.encode(v.VersionId);
        data.Contents.push({
            Key: item.key,
            LastModified: v.LastModified,
            Etag: v.ETag,
            Size: v.Size,
            Owner: {
                ID: v.Owner.ID,
                DisplayName: v.Owner.DisplayName
            },
            StorageClass: v.StorageClass,
            VersionId: versionId,
            IsLatest: true, // for compatibility
            DataStoreName: v.dataStoreName,
            ListType: ORPHAN_TYPE,
        });
    });

    return data;
}

module.exports = {
    processCurrents,
    processNonCurrents,
    processOrphans,
};
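The three `process*` helpers above only reshape a metadata listing into the JSON body returned to Backbeat. As a rough illustration of that mapping, here is a minimal, hypothetical call to `processCurrents`; the mock entry only mimics the fields the helper reads (it is not a real metadata record) and the relative require path is an assumption:

```js
// Hedged sketch: exercise processCurrents with a hand-built listing entry.
const { processCurrents } = require('./lifecycle'); // path assumed, same directory

const listParams = { prefix: 'old', maxKeys: 1000, beforeDate: '2023-01-01T00:00:00.000Z' };
const list = {
    IsTruncated: false,
    Contents: [{
        key: 'oldkey0',
        value: { // illustrative values only
            LastModified: '2022-12-01T00:00:00.000Z',
            ETag: '"d41d8cd98f00b204e9800998ecf8427e"',
            Size: 3,
            Owner: { ID: 'canonical-id', DisplayName: 'account1' },
            StorageClass: 'STANDARD',
            tags: { mykey: 'myvalue' },
            dataStoreName: 'us-east-1',
        },
    }],
};

const body = processCurrents('my-bucket', listParams, list);
// body.Contents[0].ListType === 'current'
// body.Contents[0].TagSet deep-equals [{ Key: 'mykey', Value: 'myvalue' }]
```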
@@ -0,0 +1,93 @@
const { errors } = require('arsenal');
const constants = require('../../../constants');
const services = require('../../services');
const { metadataValidateBucket } = require('../../metadata/metadataUtils');
const { pushMetric } = require('../../utapi/utilities');
const monitoring = require('../../utilities/monitoringHandler');
const { processCurrents } = require('../apiUtils/object/lifecycle');


function handleResult(listParams, requestMaxKeys, authInfo,
    bucketName, list, log, callback) {
    // eslint-disable-next-line no-param-reassign
    listParams.maxKeys = requestMaxKeys;
    // eslint-disable-next-line no-param-reassign
    const res = processCurrents(bucketName, listParams, list);

    pushMetric('listLifecycleCurrents', log, { authInfo, bucket: bucketName });
    monitoring.promMetrics('GET', bucketName, '200', 'listLifecycleCurrents');
    return callback(null, res);
}

/**
 * listLifecycleCurrents - Return list of current versions/masters in bucket
 * @param {AuthInfo} authInfo - Instance of AuthInfo class with
 *                              requester's info
 * @param {object} request - http request object
 * @param {function} log - Werelogs request logger
 * @param {function} callback - callback to respond to http request
 *                              with either error code or xml response body
 * @return {undefined}
 */
function listLifecycleCurrents(authInfo, request, log, callback) {
    const params = request.query;
    const bucketName = request.bucketName;

    log.debug('processing request', { method: 'listLifecycleCurrents' });
    const requestMaxKeys = params['max-keys'] ?
        Number.parseInt(params['max-keys'], 10) : 1000;
    if (Number.isNaN(requestMaxKeys) || requestMaxKeys < 0) {
        monitoring.promMetrics(
            'GET', bucketName, 400, 'listBucket');
        return callback(errors.InvalidArgument);
    }
    const actualMaxKeys = Math.min(constants.listingHardLimit, requestMaxKeys);

    const metadataValParams = {
        authInfo,
        bucketName,
        requestType: 'listLifecycleCurrents',
        request,
    };
    const listParams = {
        listingType: 'DelimiterCurrent',
        maxKeys: actualMaxKeys,
        prefix: params.prefix,
        beforeDate: params['before-date'],
        marker: params['key-marker'],
    };

    return metadataValidateBucket(metadataValParams, log, err => {
        if (err) {
            log.debug('error processing request', { method: 'metadataValidateBucket', error: err });
            monitoring.promMetrics(
                'GET', bucketName, err.code, 'listLifecycleCurrents');
            return callback(err, null);
        }

        if (!requestMaxKeys) {
            const emptyList = {
                Contents: [],
                IsTruncated: false,
            };
            return handleResult(listParams, requestMaxKeys, authInfo,
                bucketName, emptyList, log, callback);
        }

        return services.getLifecycleListing(bucketName, listParams, log,
            (err, list) => {
                if (err) {
                    log.debug('error processing request', { method: 'services.getLifecycleListing', error: err });
                    monitoring.promMetrics(
                        'GET', bucketName, err.code, 'listLifecycleCurrents');
                    return callback(err, null);
                }
                return handleResult(listParams, requestMaxKeys, authInfo,
                    bucketName, list, log, callback);
            });
    });
}

module.exports = {
    listLifecycleCurrents,
};
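When the listing is truncated, a caller is expected to feed `NextKeyMarker` back as `key-marker` on the next request (and, for the noncurrent listing, `NextVersionIdMarker` as `version-id-marker`). A hedged sketch of that pagination loop, assuming a `listOnce(queryObj, cb)` helper that performs one signed GET and returns the parsed body (such a helper is not part of this change):

```js
// Hypothetical pagination loop over list-type=current; listOnce() is a stand-in
// for one signed GET /_/backbeat/lifecycle/<bucket>?... returning the parsed JSON body.
function listAllCurrents(listOnce, cb, marker, acc = []) {
    const queryObj = { 'list-type': 'current', 'max-keys': '1000' };
    if (marker) {
        queryObj['key-marker'] = marker;
    }
    return listOnce(queryObj, (err, data) => {
        if (err) {
            return cb(err);
        }
        acc.push(...data.Contents);
        if (data.IsTruncated) {
            return listAllCurrents(listOnce, cb, data.NextKeyMarker, acc);
        }
        return cb(null, acc);
    });
}
```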
@@ -0,0 +1,104 @@
const { errors, versioning } = require('arsenal');
const constants = require('../../../constants');
const services = require('../../services');
const { metadataValidateBucket } = require('../../metadata/metadataUtils');
const { pushMetric } = require('../../utapi/utilities');
const versionIdUtils = versioning.VersionID;
const monitoring = require('../../utilities/monitoringHandler');
const { processNonCurrents } = require('../apiUtils/object/lifecycle');

function handleResult(listParams, requestMaxKeys, authInfo,
    bucketName, list, log, callback) {
    // eslint-disable-next-line no-param-reassign
    listParams.maxKeys = requestMaxKeys;
    // eslint-disable-next-line no-param-reassign
    const res = processNonCurrents(bucketName, listParams, list);

    pushMetric('listLifecycleNonCurrents', log, { authInfo, bucket: bucketName });
    monitoring.promMetrics('GET', bucketName, '200', 'listLifecycleNonCurrents');
    return callback(null, res);
}

/**
 * listLifecycleNonCurrents - Return list of non-current versions in bucket
 * @param {AuthInfo} authInfo - Instance of AuthInfo class with
 *                              requester's info
 * @param {object} request - http request object
 * @param {function} log - Werelogs request logger
 * @param {function} callback - callback to respond to http request
 *                              with either error code or xml response body
 * @return {undefined}
 */
function listLifecycleNonCurrents(authInfo, request, log, callback) {
    const params = request.query;
    const bucketName = request.bucketName;

    log.debug('processing request', { method: 'listLifecycleNonCurrents' });
    const requestMaxKeys = params['max-keys'] ?
        Number.parseInt(params['max-keys'], 10) : 1000;
    if (Number.isNaN(requestMaxKeys) || requestMaxKeys < 0) {
        monitoring.promMetrics(
            'GET', bucketName, 400, 'listBucket');
        return callback(errors.InvalidArgument);
    }
    const actualMaxKeys = Math.min(constants.listingHardLimit, requestMaxKeys);

    const metadataValParams = {
        authInfo,
        bucketName,
        requestType: 'listLifecycleNonCurrents',
        request,
    };
    const listParams = {
        listingType: 'DelimiterNonCurrent',
        maxKeys: actualMaxKeys,
        prefix: params.prefix,
        beforeDate: params['before-date'],
        keyMarker: params['key-marker'],
    };

    listParams.versionIdMarker = params['version-id-marker'] ?
        versionIdUtils.decode(params['version-id-marker']) : undefined;

    return metadataValidateBucket(metadataValParams, log, (err, bucket) => {
        if (err) {
            log.debug('error processing request', { method: 'metadataValidateBucket', error: err });
            monitoring.promMetrics(
                'GET', bucketName, err.code, 'listLifecycleNonCurrents');
            return callback(err, null);
        }

        const vcfg = bucket.getVersioningConfiguration();
        const isBucketVersioned = vcfg && (vcfg.Status === 'Enabled' || vcfg.Status === 'Suspended');
        if (!isBucketVersioned) {
            log.debug('bucket is not versioned');
            return callback(errors.InvalidRequest.customizeDescription(
                'bucket is not versioned'), null);
        }

        if (!requestMaxKeys) {
            const emptyList = {
                Contents: [],
                IsTruncated: false,
            };
            return handleResult(listParams, requestMaxKeys, authInfo,
                bucketName, emptyList, log, callback);
        }

        return services.getLifecycleListing(bucketName, listParams, log,
            (err, list) => {
                if (err) {
                    log.debug('error processing request', { method: 'services.getLifecycleListing', error: err });
                    monitoring.promMetrics(
                        'GET', bucketName, err.code, 'listLifecycleNonCurrents');
                    return callback(err, null);
                }
                return handleResult(listParams, requestMaxKeys, authInfo,
                    bucketName, list, log, callback);
            });
    });
}

module.exports = {
    listLifecycleNonCurrents,
};
@@ -0,0 +1,100 @@
const { errors } = require('arsenal');
const constants = require('../../../constants');
const services = require('../../services');
const { metadataValidateBucket } = require('../../metadata/metadataUtils');
const { pushMetric } = require('../../utapi/utilities');
const monitoring = require('../../utilities/monitoringHandler');
const { processOrphans } = require('../apiUtils/object/lifecycle');

function handleResult(listParams, requestMaxKeys, authInfo,
    bucketName, list, log, callback) {
    // eslint-disable-next-line no-param-reassign
    listParams.maxKeys = requestMaxKeys;
    // eslint-disable-next-line no-param-reassign
    const res = processOrphans(bucketName, listParams, list);

    pushMetric('listLifecycleOrphans', log, { authInfo, bucket: bucketName });
    monitoring.promMetrics('GET', bucketName, '200', 'listLifecycleOrphans');
    return callback(null, res);
}

/**
 * listLifecycleOrphans - Return list of expired object delete markers in bucket
 * @param {AuthInfo} authInfo - Instance of AuthInfo class with
 *                              requester's info
 * @param {object} request - http request object
 * @param {function} log - Werelogs request logger
 * @param {function} callback - callback to respond to http request
 *                              with either error code or xml response body
 * @return {undefined}
 */
function listLifecycleOrphans(authInfo, request, log, callback) {
    const params = request.query;
    const bucketName = request.bucketName;

    log.debug('processing request', { method: 'listLifecycleOrphans' });
    const requestMaxKeys = params['max-keys'] ?
        Number.parseInt(params['max-keys'], 10) : 1000;
    if (Number.isNaN(requestMaxKeys) || requestMaxKeys < 0) {
        monitoring.promMetrics(
            'GET', bucketName, 400, 'listBucket');
        return callback(errors.InvalidArgument);
    }
    const actualMaxKeys = Math.min(constants.listingHardLimit, requestMaxKeys);

    const metadataValParams = {
        authInfo,
        bucketName,
        requestType: 'listLifecycleOrphans',
        request,
    };
    const listParams = {
        listingType: 'DelimiterOrphan',
        maxKeys: actualMaxKeys,
        prefix: params.prefix,
        beforeDate: params['before-date'],
        keyMarker: params['key-marker'],
    };

    return metadataValidateBucket(metadataValParams, log, (err, bucket) => {
        if (err) {
            log.debug('error processing request', { method: 'metadataValidateBucket', error: err });
            monitoring.promMetrics(
                'GET', bucketName, err.code, 'listLifecycleOrphans');
            return callback(err, null);
        }

        const vcfg = bucket.getVersioningConfiguration();
        const isBucketVersioned = vcfg && (vcfg.Status === 'Enabled' || vcfg.Status === 'Suspended');
        if (!isBucketVersioned) {
            log.debug('bucket is not versioned or suspended');
            return callback(errors.InvalidRequest.customizeDescription(
                'bucket is not versioned'), null);
        }

        if (!requestMaxKeys) {
            const emptyList = {
                Contents: [],
                IsTruncated: false,
            };
            return handleResult(listParams, requestMaxKeys, authInfo,
                bucketName, emptyList, log, callback);
        }

        return services.getLifecycleListing(bucketName, listParams, log,
            (err, list) => {
                if (err) {
                    log.debug('error processing request', { error: err });
                    monitoring.promMetrics(
                        'GET', bucketName, err.code, 'listLifecycleOrphans');
                    return callback(err, null);
                }
                return handleResult(listParams, requestMaxKeys, authInfo,
                    bucketName, list, log, callback);
            });
    });
}

module.exports = {
    listLifecycleOrphans,
};
@@ -32,6 +32,15 @@ const constants = require('../../constants');
const { BackendInfo } = models;
const { pushReplicationMetric } = require('./utilities/pushReplicationMetric');
const kms = require('../kms/wrapper');
const { listLifecycleCurrents } = require('../api/backbeat/listLifecycleCurrents');
const { listLifecycleNonCurrents } = require('../api/backbeat/listLifecycleNonCurrents');
const { listLifecycleOrphans } = require('../api/backbeat/listLifecycleOrphans');

const lifecycleTypeCalls = {
    'current': listLifecycleCurrents,
    'noncurrent': listLifecycleNonCurrents,
    'orphan': listLifecycleOrphans,
};

auth.setHandler(vault);
@@ -320,6 +329,9 @@ POST /_/backbeat/multiplebackenddata/<bucket name>/<object key>
    ?operation=puttagging
GET /_/backbeat/multiplebackendmetadata/<bucket name>/<object key>
POST /_/backbeat/batchdelete
GET /_/backbeat/lifecycle/<bucket name>?list-type=current
GET /_/backbeat/lifecycle/<bucket name>?list-type=noncurrent
GET /_/backbeat/lifecycle/<bucket name>?list-type=orphan
*/

function _getLastModified(locations, log, cb) {
@@ -1017,7 +1029,7 @@ function _shouldConditionallyDelete(request, locations) {
    return isExternalBackend && isNotVersioned;
}

function batchDelete(request, response, log, callback) {
function batchDelete(request, response, userInfo, log, callback) {
    return _getRequestPayload(request, (err, payload) => {
        if (err) {
            return callback(err);
@@ -1069,6 +1081,35 @@ function batchDelete(request, response, log, callback) {
    });
}

function listLifecycle(request, response, userInfo, log, cb) {
    if (!request.query || !request.query['list-type']) {
        const errMessage = 'bad request: missing list-type query parameter';
        log.error(errMessage);
        return cb(errors.BadRequest.customizeDescription(errMessage));
    }
    const listType = request.query['list-type'];

    let call;
    if (lifecycleTypeCalls[listType]) {
        call = lifecycleTypeCalls[listType];
    } else {
        const errMessage = `bad request: invalid list-type query parameter: ${listType}`;
        log.error(errMessage);
        return cb(errors.BadRequest.customizeDescription(errMessage));
    }

    return call(userInfo, request, log, (err, data) => {
        if (err) {
            log.error(`error during listing objects for lifecycle: ${listType}`, {
                error: err,
                method: 'listLifecycle',
            });
            return cb(err);
        }
        return _respond(response, data, log, cb);
    });
}

const backbeatRoutes = {
    PUT: {
        data: putData,
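Once `listLifecycle` is registered under `backbeatRoutes.GET.lifecycle` (next hunk), the new listings are ordinary authenticated GETs on the backbeat router. A rough client sketch in Node follows; the host, port and the omission of request signing are assumptions — the functional tests below go through the signed `makeBackbeatRequest` helper instead:

```js
const http = require('http');

// Hedged sketch only: assumes the backbeat routes are reachable on localhost:8000
// and skips the signing that a real deployment requires.
const req = http.request({
    host: 'localhost',
    port: 8000,
    method: 'GET',
    path: '/_/backbeat/lifecycle/my-bucket?list-type=orphan&max-keys=100',
}, res => {
    let body = '';
    res.on('data', chunk => { body += chunk; });
    res.on('end', () => console.log(res.statusCode, JSON.parse(body)));
});
req.on('error', err => console.error(err));
req.end();
```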
@@ -1096,6 +1137,7 @@ const backbeatRoutes = {
    GET: {
        metadata: getMetadata,
        multiplebackendmetadata: headObject,
        lifecycle: listLifecycle,
    },
};
@@ -1183,25 +1225,26 @@ function routeBackbeat(clientIP, request, response, log) {
        });
        return responseJSONBody(errors.MethodNotAllowed, null, response, log);
    }

    if (!_isObjectRequest(request)) {
        const route = backbeatRoutes[request.method][request.resourceType];
        return route(request, response, log, err => {
            if (err) {
                return responseJSONBody(err, null, response, log);
            }
            return undefined;
        });
    }
    const decodedVidResult = decodeVersionId(request.query);
    if (decodedVidResult instanceof Error) {
        log.trace('invalid versionId query', {
            versionId: request.query.versionId,
            error: decodedVidResult,
        });
        return responseJSONBody(errors.InvalidArgument, null, response, log);
    }
    const versionId = decodedVidResult;
    // // TODO: understand why batchdelete is not authenticated
    // // TODO: listLifecycle need to be authenticated.
    // if (!_isObjectRequest(request)) {
    //     const route = backbeatRoutes[request.method][request.resourceType];
    //     return route(request, response, log, err => {
    //         if (err) {
    //             return responseJSONBody(err, null, response, log);
    //         }
    //         return undefined;
    //     });
    // }
    // const decodedVidResult = decodeVersionId(request.query);
    // if (decodedVidResult instanceof Error) {
    //     log.trace('invalid versionId query', {
    //         versionId: request.query.versionId,
    //         error: decodedVidResult,
    //     });
    //     return responseJSONBody(errors.InvalidArgument, null, response, log);
    // }
    // const versionId = decodedVidResult;
    return async.waterfall([next => auth.server.doAuth(
        request, log, (err, userInfo) => {
            if (err) {
@@ -1215,6 +1258,26 @@ function routeBackbeat(clientIP, request, response, log) {
            return next(err, userInfo);
        }, 's3', requestContexts),
        (userInfo, next) => {
            // TODO: understand why non-object requests (batchdelete) were not authenticated
            if (!_isObjectRequest(request)) {
                const route = backbeatRoutes[request.method][request.resourceType];
                return route(request, response, userInfo, log, err => {
                    if (err) {
                        return responseJSONBody(err, null, response, log);
                    }
                    return undefined;
                });
            }

            const decodedVidResult = decodeVersionId(request.query);
            if (decodedVidResult instanceof Error) {
                log.trace('invalid versionId query', {
                    versionId: request.query.versionId,
                    error: decodedVidResult,
                });
                return responseJSONBody(errors.InvalidArgument, null, response, log);
            }
            const versionId = decodedVidResult;
            if (useMultipleBackend) {
                // Bucket and object do not exist in metadata.
                return next(null, null, null);
@@ -365,6 +365,20 @@ const services = {
        });
    },

    getLifecycleListing(bucketName, listingParams, log, cb) {
        assert.strictEqual(typeof bucketName, 'string');
        log.trace('performing metadata get lifecycle object listing',
            { listingParams });
        metadata.listLifecycleObject(bucketName, listingParams, log,
            (err, listResponse) => {
                if (err) {
                    log.debug('error from metadata', { error: err });
                    return cb(err);
                }
                return cb(null, listResponse);
            });
    },

    metadataStoreMPObject(bucketName, cipherBundle, params, log, cb) {
        assert.strictEqual(typeof bucketName, 'string');
        assert.strictEqual(typeof params.splitter, 'string');
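`getLifecycleListing` is a thin pass-through to `metadata.listLifecycleObject`; callers select the listing algorithm through `listingParams.listingType`. A hedged sketch of a caller, mirroring the API handlers above — the werelogs logger construction and the relative require paths are assumptions:

```js
const werelogs = require('werelogs');
const services = require('../../services'); // path assumed, as seen from an api/ module
const { processNonCurrents } = require('../apiUtils/object/lifecycle');

const log = new werelogs.Logger('LifecycleListingExample').newRequestLogger();
const listParams = {
    listingType: 'DelimiterNonCurrent', // or 'DelimiterCurrent' / 'DelimiterOrphan'
    maxKeys: 1000,
    prefix: 'key1',
    beforeDate: '2023-01-01T00:00:00.000Z',
};

services.getLifecycleListing('my-bucket', listParams, log, (err, list) => {
    if (err) {
        // e.g. NoSuchBucket propagated from metadata
        return log.error('listing failed', { error: err });
    }
    const body = processNonCurrents('my-bucket', listParams, list);
    return log.info('listing done', { entries: body.Contents.length });
});
```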
@@ -1,105 +1,40 @@
{
    "us-east-1": {
        "type": "file",
        "objectId": "us-east-1",
        "legacyAwsBehavior": true,
        "details": {}
    },
    "us-east-2": {
        "type": "file",
        "objectId": "us-east-2",
        "legacyAwsBehavior": false,
        "details": {}
    },
    "us-west-1": {
        "type": "file",
        "objectId": "us-west-1",
        "legacyAwsBehavior": false,
        "details": {}
    },
    "us-west-2": {
        "type": "file",
        "objectId": "us-west-2",
        "legacyAwsBehavior": false,
        "details": {}
    },
    "ca-central-1": {
        "type": "file",
        "objectId": "ca-central-1",
        "legacyAwsBehavior": false,
        "details": {}
    },
    "cn-north-1": {
        "type": "file",
        "objectId": "cn-north-1",
        "legacyAwsBehavior": false,
        "details": {}
    },
    "ap-south-1": {
        "type": "file",
        "objectId": "ap-south-1",
        "legacyAwsBehavior": false,
        "details": {}
    },
    "ap-northeast-1": {
        "type": "file",
        "objectId": "ap-northeast-1",
        "legacyAwsBehavior": false,
        "details": {}
    },
    "ap-northeast-2": {
        "type": "file",
        "objectId": "ap-northeast-2",
        "legacyAwsBehavior": false,
        "details": {}
    },
    "ap-southeast-1": {
        "type": "file",
        "objectId": "ap-southeast-1",
        "legacyAwsBehavior": false,
        "details": {}
    },
    "ap-southeast-2": {
        "type": "file",
        "objectId": "ap-southeast-2",
        "legacyAwsBehavior": false,
        "details": {}
    },
    "eu-central-1": {
        "type": "file",
        "objectId": "eu-central-1",
        "legacyAwsBehavior": false,
        "details": {}
    },
    "eu-west-1": {
        "type": "file",
        "objectId": "eu-west-1",
        "legacyAwsBehavior": false,
        "details": {}
    },
    "eu-west-2": {
        "type": "file",
        "objectId": "eu-west-2",
        "legacyAwsBehavior": false,
        "details": {}
    },
    "EU": {
        "type": "file",
        "objectId": "EU",
        "legacyAwsBehavior": false,
        "details": {}
    },
    "sa-east-1": {
        "type": "file",
        "objectId": "sa-east-1",
        "legacyAwsBehavior": false,
        "details": {}
        "details": {
            "supportsVersioning": true
        },
        "isTransient": false,
        "legacyAwsBehavior": false,
        "objectId": "0b1d9226-a694-11eb-bc21-baec55d199cd",
        "type": "file"
    },
    "location-dmf-v1": {
        "type": "dmf",
        "objectId": "location-dmf-v1",
        "legacyAwsBehavior": false,
        "isCold": true,
        "details": {}
        "details": {
            "endpoint": "ws://localhost:5001/session",
            "username": "user1",
            "password": "pass1",
            "repoId": [
                "233aead6-1d7b-4647-a7cf-0d3280b5d1d7",
                "81e78de8-df11-4acd-8ad1-577ff05a68db"
            ],
            "nsId": "65f9fd61-42fe-4a68-9ac0-6ba25311cc85"
        }
    },
    "aws-location": {
        "type": "aws_s3",
        "legacyAwsBehavior": true,
        "objectId": "2b1d9226-a694-11eb-bc21-baec55d199cd",
        "details": {
            "awsEndpoint": "s3.amazonaws.com",
            "bucketName": "n2b-versioned",
            "bucketMatch": false,
            "credentialsProfile": "aws",
            "serverSideEncryption": true
        }
    }
}
}
@@ -21,7 +21,7 @@
    "dependencies": {
        "@azure/storage-blob": "^12.12.0",
        "@hapi/joi": "^17.1.0",
        "arsenal": "git+https://github.com/scality/arsenal#8.1.84",
        "arsenal": "git+https://github.com/scality/Arsenal#feature/ARSN-312/listLifecycleOrphan",
        "async": "~2.5.0",
        "aws-sdk": "2.905.0",
        "bucketclient": "scality/bucketclient#8.1.5",

@@ -78,6 +78,7 @@
        "ft_awssdk_versioning": "cd tests/functional/aws-node-sdk && mocha --reporter mocha-multi-reporters --reporter-options configFile=$INIT_CWD/tests/reporter-config.json test/versioning/",
        "ft_awssdk_external_backends": "cd tests/functional/aws-node-sdk && mocha --reporter mocha-multi-reporters --reporter-options configFile=$INIT_CWD/tests/reporter-config.json test/multipleBackend",
        "ft_mixed_bucket_format_version": "cd tests/functional/metadata && mocha --reporter mocha-multi-reporters --reporter-options configFile=$INIT_CWD/tests/reporter-config.json MixedVersionFormat.js",
        "ft_backbeat": "cd tests/functional/backbeat && mocha --reporter mocha-multi-reporters --reporter-options configFile=$INIT_CWD/tests/reporter-config.json -t 40000 *.js",
        "ft_management": "cd tests/functional/report && yarn test",
        "ft_node": "cd tests/functional/raw-node && yarn test",
        "ft_node_routes": "cd tests/functional/raw-node && npm run test-routes",

@@ -86,7 +87,7 @@
        "ft_s3cmd": "cd tests/functional/s3cmd && mocha --reporter mocha-multi-reporters --reporter-options configFile=$INIT_CWD/tests/reporter-config.json -t 40000 *.js",
        "ft_s3curl": "cd tests/functional/s3curl && mocha --reporter mocha-multi-reporters --reporter-options configFile=$INIT_CWD/tests/reporter-config.json -t 40000 *.js",
        "ft_util": "cd tests/functional/utilities && mocha --reporter mocha-multi-reporters --reporter-options configFile=$INIT_CWD/tests/reporter-config.json -t 40000 *.js",
        "ft_test": "npm-run-all -s ft_awssdk ft_s3cmd ft_s3curl ft_node ft_healthchecks ft_management ft_util",
        "ft_test": "npm-run-all -s ft_awssdk ft_s3cmd ft_s3curl ft_node ft_healthchecks ft_management ft_util ft_backbeat",
        "ft_search": "cd tests/functional/aws-node-sdk && mocha --reporter mocha-multi-reporters --reporter-options configFile=$INIT_CWD/tests/reporter-config.json -t 90000 test/mdSearch",
        "ft_kmip": "cd tests/functional/kmip && mocha --reporter mocha-multi-reporters --reporter-options configFile=$INIT_CWD/tests/reporter-config.json -t 40000 *.js",
        "install_ft_deps": "yarn install aws-sdk@2.28.0 bluebird@3.3.1 mocha@2.3.4 mocha-junit-reporter@1.23.1 tv4@1.2.7",
@@ -1,7 +1,7 @@
{
    "default": {
        "accessKey": "accessKey1",
        "secretKey": "verySecretKey1"
        "accessKey": "WLI8X7JGPU1AWQEQIKM5",
        "secretKey": "0Src2X+kIrR1SUo/NhR5o1V4hqU1dtlePBHAcCbV"
    },
    "lisa": {
        "accessKey": "accessKey2",
@@ -0,0 +1,294 @@
const assert = require('assert');
const async = require('async');
const BucketUtility = require('../aws-node-sdk/lib/utility/bucket-util');
const { removeAllVersions } = require('../aws-node-sdk/lib/utility/versioning-util');
const { makeBackbeatRequest } = require('./utils');

const testBucket = 'bucket-for-list-lifecycle-current-tests';
const emptyBucket = 'empty-bucket-for-list-lifecycle-current-tests';

const credentials = {
    accessKey: 'WLI8X7JGPU1AWQEQIKM5',
    secretKey: '0Src2X+kIrR1SUo/NhR5o1V4hqU1dtlePBHAcCbV',
};

function checkContents(contents) {
    contents.forEach(d => {
        assert(d.Key);
        assert(d.LastModified);
        assert(d.Etag);
        assert(d.Owner.DisplayName);
        assert(d.Owner.ID);
        assert(d.StorageClass);
        assert.strictEqual(d.StorageClass, 'STANDARD');
        assert.deepStrictEqual(d.TagSet, [{
            Key: 'mykey',
            Value: 'myvalue',
        }]);
        assert.strictEqual(d.IsLatest, true);
        assert.strictEqual(d.DataStoreName, 'us-east-1');
        assert.strictEqual(d.ListType, 'current');
        assert.strictEqual(d.Size, 3);
    });
}

['Enabled', 'Disabled'].forEach(versioning => {
    describe(`listLifecycleCurrents with bucket versioning ${versioning}`, () => {
        let bucketUtil;
        let s3;
        let date;

        before(done => {
            bucketUtil = new BucketUtility('account1', { signatureVersion: 'v4' });
            s3 = bucketUtil.s3;

            return async.series([
                next => s3.createBucket({ Bucket: testBucket }, next),
                next => s3.createBucket({ Bucket: emptyBucket }, next),
                next => {
                    if (versioning !== 'Enabled') {
                        return process.nextTick(next);
                    }
                    return s3.putBucketVersioning({
                        Bucket: testBucket,
                        VersioningConfiguration: { Status: 'Enabled' },
                    }, next);
                },
                next => {
                    if (versioning !== 'Enabled') {
                        return process.nextTick(next);
                    }
                    return s3.putBucketVersioning({
                        Bucket: emptyBucket,
                        VersioningConfiguration: { Status: 'Enabled' },
                    }, next);
                },
                next => async.times(3, (n, cb) => {
                    s3.putObject({ Bucket: testBucket, Key: `oldkey${n}`, Body: '123', Tagging: 'mykey=myvalue' }, cb);
                }, next),
                next => {
                    date = new Date(Date.now()).toISOString();
                    return async.times(5, (n, cb) => {
                        s3.putObject({ Bucket: testBucket, Key: `key${n}`, Body: '123', Tagging: 'mykey=myvalue' }, cb);
                    }, next);
                },
            ], done);
        });

        after(done => async.series([
            next => removeAllVersions({ Bucket: testBucket }, next),
            next => s3.deleteBucket({ Bucket: testBucket }, next),
            next => s3.deleteBucket({ Bucket: emptyBucket }, next),
        ], done));

        it('should return empty list of current versions if bucket is empty', done => {
            makeBackbeatRequest({
                method: 'GET',
                bucket: emptyBucket,
                queryObj: { 'list-type': 'current' },
                authCredentials: credentials,
            }, (err, response) => {
                assert.ifError(err);
                assert.strictEqual(response.statusCode, 200);
                const data = JSON.parse(response.body);

                assert.strictEqual(data.IsTruncated, false);
                assert(!data.NextKeyMarker);
                assert.strictEqual(data.MaxKeys, 1000);
                assert.strictEqual(data.Contents.length, 0);
                return done();
            });
        });

        it('should return empty list of current versions if prefix does not apply', done => {
            makeBackbeatRequest({
                method: 'GET',
                bucket: testBucket,
                queryObj: { 'list-type': 'current', prefix: 'unknown' },
                authCredentials: credentials,
            }, (err, response) => {
                assert.ifError(err);
                assert.strictEqual(response.statusCode, 200);
                const data = JSON.parse(response.body);

                assert.strictEqual(data.IsTruncated, false);
                assert(!data.NextKeyMarker);
                assert.strictEqual(data.MaxKeys, 1000);
                assert.strictEqual(data.Contents.length, 0);
                return done();
            });
        });

        it('should return NoSuchBucket error if bucket does not exist', done => {
            makeBackbeatRequest({
                method: 'GET',
                bucket: 'idonotexist',
                queryObj: { 'list-type': 'current' },
                authCredentials: credentials,
            }, err => {
                assert.strictEqual(err.code, 'NoSuchBucket');
                return done();
            });
        });

        it('should return InvalidArgument error if max-keys is invalid', done => {
            makeBackbeatRequest({
                method: 'GET',
                bucket: testBucket,
                queryObj: { 'list-type': 'current', 'max-keys': 'a' },
                authCredentials: credentials,
            }, err => {
                assert.strictEqual(err.code, 'InvalidArgument');
                return done();
            });
        });

        it('should return all the current versions', done => {
            makeBackbeatRequest({
                method: 'GET',
                bucket: testBucket,
                queryObj: { 'list-type': 'current' },
                authCredentials: credentials,
            }, (err, response) => {
                assert.ifError(err);
                assert.strictEqual(response.statusCode, 200);
                const data = JSON.parse(response.body);

                assert.strictEqual(data.IsTruncated, false);
                assert(!data.NextKeyMarker);
                assert.strictEqual(data.MaxKeys, 1000);

                const contents = data.Contents;
                assert.strictEqual(contents.length, 8);
                checkContents(contents);

                return done();
            });
        });

        it('should return all the current versions with prefix old', done => {
            const prefix = 'old';

            makeBackbeatRequest({
                method: 'GET',
                bucket: testBucket,
                queryObj: { 'list-type': 'current', prefix },
                authCredentials: credentials,
            }, (err, response) => {
                assert.ifError(err);
                assert.strictEqual(response.statusCode, 200);
                const data = JSON.parse(response.body);

                assert.strictEqual(data.IsTruncated, false);
                assert(!data.NextKeyMarker);
                assert.strictEqual(data.MaxKeys, 1000);
                assert.strictEqual(data.Prefix, prefix);

                const contents = data.Contents;
                assert.strictEqual(contents.length, 3);
                checkContents(contents);

                return done();
            });
        });

        it('should return the current versions before a defined date', done => {
            makeBackbeatRequest({
                method: 'GET',
                bucket: testBucket,
                queryObj: { 'list-type': 'current', 'before-date': date },
                authCredentials: credentials,
            }, (err, response) => {
                assert.ifError(err);
                assert.strictEqual(response.statusCode, 200);
                const data = JSON.parse(response.body);

                assert.strictEqual(data.IsTruncated, false);
                assert(!data.NextKeyMarker);
                assert.strictEqual(data.MaxKeys, 1000);
                assert.strictEqual(data.Contents.length, 3);
                assert.strictEqual(data.BeforeDate, date);

                const contents = data.Contents;
                checkContents(contents);
                assert.strictEqual(contents[0].Key, 'oldkey0');
                assert.strictEqual(contents[1].Key, 'oldkey1');
                assert.strictEqual(contents[2].Key, 'oldkey2');
                return done();
            });
        });

        it('should truncate list of current versions before a defined date', done => {
            makeBackbeatRequest({
                method: 'GET',
                bucket: testBucket,
                queryObj: { 'list-type': 'current', 'before-date': date, 'max-keys': '1' },
                authCredentials: credentials,
            }, (err, response) => {
                assert.ifError(err);
                assert.strictEqual(response.statusCode, 200);
                const data = JSON.parse(response.body);

                assert.strictEqual(data.IsTruncated, true);
                assert.strictEqual(data.NextKeyMarker, 'oldkey0');
                assert.strictEqual(data.MaxKeys, 1);
                assert.strictEqual(data.BeforeDate, date);
                assert.strictEqual(data.Contents.length, 1);

                const contents = data.Contents;
                checkContents(contents);
                assert.strictEqual(contents[0].Key, 'oldkey0');
                return done();
            });
        });

        it('should return the next truncated list of current versions before a defined date', done => {
            makeBackbeatRequest({
                method: 'GET',
                bucket: testBucket,
                queryObj: { 'list-type': 'current', 'before-date': date, 'max-keys': '1', 'key-marker': 'oldkey0' },
                authCredentials: credentials,
            }, (err, response) => {
                assert.ifError(err);
                assert.strictEqual(response.statusCode, 200);
                const data = JSON.parse(response.body);

                assert.strictEqual(data.IsTruncated, true);
                assert.strictEqual(data.KeyMarker, 'oldkey0');
                assert.strictEqual(data.NextKeyMarker, 'oldkey1');
                assert.strictEqual(data.MaxKeys, 1);
                assert.strictEqual(data.Contents.length, 1);

                const contents = data.Contents;
                checkContents(contents);
                assert.strictEqual(contents[0].Key, 'oldkey1');
                assert.strictEqual(data.BeforeDate, date);
                return done();
            });
        });

        it('should return the last truncated list of current versions before a defined date', done => {
            makeBackbeatRequest({
                method: 'GET',
                bucket: testBucket,
                queryObj: { 'list-type': 'current', 'before-date': date, 'max-keys': '1', 'key-marker': 'oldkey1' },
                authCredentials: credentials,
            }, (err, response) => {
                assert.ifError(err);
                assert.strictEqual(response.statusCode, 200);
                const data = JSON.parse(response.body);

                assert.strictEqual(data.IsTruncated, false);
                assert.strictEqual(data.MaxKeys, 1);
                assert.strictEqual(data.KeyMarker, 'oldkey1');
                assert.strictEqual(data.BeforeDate, date);

                const contents = data.Contents;
                assert.strictEqual(contents.length, 1);
                checkContents(contents);
                assert.strictEqual(contents[0].Key, 'oldkey2');
                return done();
            });
        });
    });
});
@ -0,0 +1,509 @@
|
|||
const assert = require('assert');
|
||||
const async = require('async');
|
||||
const BucketUtility = require('../aws-node-sdk/lib/utility/bucket-util');
|
||||
const { removeAllVersions } = require('../aws-node-sdk/lib/utility/versioning-util');
|
||||
const { makeBackbeatRequest } = require('./utils');
|
||||
|
||||
const testBucket = 'bucket-for-list-lifecycle-noncurrent-tests';
|
||||
const emptyBucket = 'empty-bucket-for-list-lifecycle-noncurrent-tests';
|
||||
const nonVersionedBucket = 'non-versioned-bucket-for-list-lifecycle-noncurrent-tests';
|
||||
|
||||
const credentials = {
|
||||
accessKey: 'WLI8X7JGPU1AWQEQIKM5',
|
||||
secretKey: '0Src2X+kIrR1SUo/NhR5o1V4hqU1dtlePBHAcCbV',
|
||||
};
|
||||
|
||||
function checkContents(contents) {
|
||||
contents.forEach(d => {
|
||||
assert(d.Key);
|
||||
assert(d.LastModified);
|
||||
assert(d.Etag);
|
||||
assert(d.Owner.DisplayName);
|
||||
assert(d.Owner.ID);
|
||||
assert(d.StorageClass);
|
||||
assert.strictEqual(d.StorageClass, 'STANDARD');
|
||||
assert(d.VersionId);
|
||||
assert(d.staleDate);
|
||||
assert(!d.IsLatest);
|
||||
assert.deepStrictEqual(d.TagSet, [{
|
||||
Key: 'mykey',
|
||||
Value: 'myvalue',
|
||||
}]);
|
||||
assert.strictEqual(d.DataStoreName, 'us-east-1');
|
||||
assert.strictEqual(d.ListType, 'noncurrent');
|
||||
assert.strictEqual(d.Size, 3);
|
||||
});
|
||||
}
|
||||
|
||||
describe('listLifecycleNonCurrents', () => {
|
||||
let bucketUtil;
|
||||
let s3;
|
||||
let date;
|
||||
let expectedKey1VersionIds = [];
|
||||
let expectedKey2VersionIds = [];
|
||||
|
||||
before(done => {
|
||||
bucketUtil = new BucketUtility('account1', { signatureVersion: 'v4' });
|
||||
s3 = bucketUtil.s3;
|
||||
|
||||
return async.series([
|
||||
next => s3.createBucket({ Bucket: testBucket }, next),
|
||||
next => s3.createBucket({ Bucket: emptyBucket }, next),
|
||||
next => s3.createBucket({ Bucket: nonVersionedBucket }, next),
|
||||
next => s3.putBucketVersioning({
|
||||
Bucket: testBucket,
|
||||
VersioningConfiguration: { Status: 'Enabled' },
|
||||
}, next),
|
||||
next => s3.putBucketVersioning({
|
||||
Bucket: emptyBucket,
|
||||
VersioningConfiguration: { Status: 'Enabled' },
|
||||
}, next),
|
||||
next => async.timesSeries(3, (n, cb) => {
|
||||
s3.putObject({ Bucket: testBucket, Key: 'key1', Body: '123', Tagging: 'mykey=myvalue' }, cb);
|
||||
}, (err, res) => {
|
||||
// Only the two first ones are kept, since the stale date of the last one (3rd)
|
||||
// Will be the last-modified of the next one (4th) that is created after the "date".
|
||||
// The array is reverse since, for a specific key, we expect the listing to be ordered
|
||||
// by last-modified date in descending order due to the way version id is generated.
|
||||
expectedKey1VersionIds = res.map(r => r.VersionId).slice(0, 2).reverse();
|
||||
return next(err);
|
||||
}),
|
||||
next => async.timesSeries(3, (n, cb) => {
|
||||
s3.putObject({ Bucket: testBucket, Key: 'key2', Body: '123', Tagging: 'mykey=myvalue' }, cb);
|
||||
}, (err, res) => {
|
||||
// Only the two first ones are kept, since the stale date of the last one (3rd)
|
||||
// Will be the last-modified of the next one (4th) that is created after the "date".
|
||||
// The array is reverse since, for a specific key, we expect the listing to be ordered
|
||||
// by last-modified date in descending order due to the way version id is generated.
|
||||
expectedKey2VersionIds = res.map(r => r.VersionId).slice(0, 2).reverse();
|
||||
return next(err);
|
||||
}),
|
||||
next => {
|
||||
date = new Date(Date.now()).toISOString();
|
||||
return async.times(5, (n, cb) => {
|
||||
s3.putObject({ Bucket: testBucket, Key: 'key1', Body: '123', Tagging: 'mykey=myvalue' }, cb);
|
||||
}, next);
|
||||
},
|
||||
next => async.times(5, (n, cb) => {
|
||||
s3.putObject({ Bucket: testBucket, Key: 'key2', Body: '123', Tagging: 'mykey=myvalue' }, cb);
|
||||
}, next),
|
||||
], done);
|
||||
});
|
||||
|
||||
after(done => async.series([
|
||||
next => removeAllVersions({ Bucket: testBucket }, next),
|
||||
next => s3.deleteBucket({ Bucket: testBucket }, next),
|
||||
next => s3.deleteBucket({ Bucket: emptyBucket }, next),
|
||||
next => s3.deleteBucket({ Bucket: nonVersionedBucket }, next),
|
||||
], done));
|
||||
|
||||
it('should return empty list of noncurrent versions if bucket is empty', done => {
|
||||
makeBackbeatRequest({
|
||||
method: 'GET',
|
||||
bucket: emptyBucket,
|
||||
queryObj: { 'list-type': 'noncurrent' },
|
||||
authCredentials: credentials,
|
||||
}, (err, response) => {
|
||||
assert.ifError(err);
|
||||
assert.strictEqual(response.statusCode, 200);
|
||||
const data = JSON.parse(response.body);
|
||||
|
||||
assert.strictEqual(data.IsTruncated, false);
|
||||
assert(!data.NextKeyMarker);
|
||||
assert.strictEqual(data.MaxKeys, 1000);
|
||||
assert.strictEqual(data.Contents.length, 0);
|
||||
return done();
|
||||
});
|
||||
});
|
||||
|
||||
it('should return empty list of noncurrent versions if prefix does not apply', done => {
|
||||
makeBackbeatRequest({
|
||||
method: 'GET',
|
||||
bucket: testBucket,
|
||||
queryObj: { 'list-type': 'noncurrent', prefix: 'unknown' },
|
||||
authCredentials: credentials,
|
||||
}, (err, response) => {
|
||||
assert.ifError(err);
|
||||
assert.strictEqual(response.statusCode, 200);
|
||||
const data = JSON.parse(response.body);
|
||||
|
||||
assert.strictEqual(data.IsTruncated, false);
|
||||
assert(!data.NextKeyMarker);
|
||||
assert.strictEqual(data.MaxKeys, 1000);
|
||||
assert.strictEqual(data.Contents.length, 0);
|
||||
return done();
|
||||
});
|
||||
});
|
||||
|
||||
it('should return error if bucket does not exist', done => {
|
||||
makeBackbeatRequest({
|
||||
method: 'GET',
|
||||
bucket: 'idonotexist',
|
||||
queryObj: { 'list-type': 'noncurrent' },
|
||||
authCredentials: credentials,
|
||||
}, err => {
|
||||
assert.strictEqual(err.code, 'NoSuchBucket');
|
||||
return done();
|
||||
});
|
||||
});
|
||||
|
||||
it('should return BadRequest error if list type is empty', done => {
|
||||
makeBackbeatRequest({
|
||||
method: 'GET',
|
||||
bucket: testBucket,
|
||||
queryObj: { 'list-type': '' },
|
||||
authCredentials: credentials,
|
||||
}, err => {
|
||||
assert.strictEqual(err.code, 'BadRequest');
|
||||
return done();
|
||||
});
|
||||
});
|
||||
|
||||
it('should return BadRequest error if list type is invalid', done => {
|
||||
makeBackbeatRequest({
|
||||
method: 'GET',
|
||||
bucket: testBucket,
|
||||
queryObj: { 'list-type': 'invalid' },
|
||||
authCredentials: credentials,
|
||||
}, err => {
|
||||
assert.strictEqual(err.code, 'BadRequest');
|
||||
return done();
|
||||
});
|
||||
});
|
||||
|
||||
it('should return InvalidArgument error if max-keys is invalid', done => {
|
||||
makeBackbeatRequest({
|
||||
method: 'GET',
|
||||
bucket: testBucket,
|
||||
queryObj: { 'list-type': 'noncurrent', 'max-keys': 'a' },
|
||||
authCredentials: credentials,
|
||||
}, err => {
|
||||
assert.strictEqual(err.code, 'InvalidArgument');
|
||||
return done();
|
||||
});
|
||||
});
|
||||
|
||||
it('should return error if bucket not versioned', done => {
|
||||
makeBackbeatRequest({
|
||||
method: 'GET',
|
||||
bucket: nonVersionedBucket,
|
||||
queryObj: { 'list-type': 'noncurrent' },
|
||||
authCredentials: credentials,
|
||||
}, err => {
|
||||
assert.strictEqual(err.code, 'InvalidRequest');
|
||||
return done();
|
||||
});
|
||||
});
|
||||
|
||||
it('should return all the noncurrent versions', done => {
|
||||
makeBackbeatRequest({
|
||||
method: 'GET',
|
||||
bucket: testBucket,
|
||||
queryObj: { 'list-type': 'noncurrent' },
|
||||
authCredentials: credentials,
|
||||
}, (err, response) => {
|
||||
assert.ifError(err);
|
||||
assert.strictEqual(response.statusCode, 200);
|
||||
const data = JSON.parse(response.body);
|
||||
|
||||
assert.strictEqual(data.IsTruncated, false);
|
||||
assert(!data.NextKeyMarker);
|
||||
assert.strictEqual(data.MaxKeys, 1000);
|
||||
|
||||
const contents = data.Contents;
|
||||
assert.strictEqual(contents.length, 14);
|
||||
checkContents(contents);
|
||||
|
||||
return done();
|
||||
});
|
||||
});
|
||||
|
||||
it('should return all the noncurrent versions with prefix key1', done => {
|
||||
const prefix = 'key1';
|
||||
|
||||
makeBackbeatRequest({
|
||||
method: 'GET',
|
||||
bucket: testBucket,
|
||||
queryObj: { 'list-type': 'noncurrent', prefix },
|
||||
authCredentials: credentials,
|
||||
}, (err, response) => {
|
||||
assert.ifError(err);
|
||||
assert.strictEqual(response.statusCode, 200);
|
||||
const data = JSON.parse(response.body);
|
||||
|
||||
assert.strictEqual(data.IsTruncated, false);
|
||||
assert(!data.NextKeyMarker);
|
||||
assert.strictEqual(data.MaxKeys, 1000);
|
||||
assert.strictEqual(data.Prefix, prefix);
|
||||
|
||||
const contents = data.Contents;
|
||||
assert.strictEqual(contents.length, 7);
|
||||
assert(contents.every(d => d.Key === 'key1'));
|
||||
checkContents(contents);
|
||||
|
||||
return done();
|
||||
});
|
||||
});
|
||||
|
||||
it('should return all the noncurrent versions with prefix key1 before a defined date', done => {
|
||||
const prefix = 'key1';
|
||||
|
||||
makeBackbeatRequest({
|
||||
method: 'GET',
|
||||
bucket: testBucket,
|
||||
queryObj: { 'list-type': 'noncurrent', prefix, 'before-date': date },
|
||||
authCredentials: credentials,
|
||||
}, (err, response) => {
|
||||
assert.ifError(err);
|
||||
assert.strictEqual(response.statusCode, 200);
|
||||
const data = JSON.parse(response.body);
|
||||
|
||||
assert.strictEqual(data.IsTruncated, false);
|
||||
assert(!data.NextKeyMarker);
|
||||
assert.strictEqual(data.MaxKeys, 1000);
|
||||
assert.strictEqual(data.Prefix, prefix);
|
||||
|
||||
const contents = data.Contents;
|
||||
assert.strictEqual(contents.length, 2);
|
||||
assert(contents.every(d => d.Key === 'key1'));
|
||||
|
||||
assert.deepStrictEqual(contents.map(v => v.VersionId), expectedKey1VersionIds);
|
||||
|
||||
checkContents(contents);
|
||||
|
||||
return done();
|
||||
});
|
||||
});
|
||||
|
||||
it('should return the noncurrent version with prefix key1, before a defined date, and after marker', done => {
|
||||
const prefix = 'key2';
|
||||
|
||||
makeBackbeatRequest({
|
||||
method: 'GET',
|
||||
bucket: testBucket,
|
||||
queryObj: {
|
||||
'list-type': 'noncurrent',
|
||||
prefix,
|
||||
'before-date': date,
|
||||
'key-marker': 'key1',
|
||||
},
|
||||
authCredentials: credentials,
|
||||
}, (err, response) => {
|
||||
assert.ifError(err);
|
||||
assert.strictEqual(response.statusCode, 200);
|
||||
const data = JSON.parse(response.body);
|
||||
|
||||
assert.strictEqual(data.IsTruncated, false);
|
||||
assert(!data.NextKeyMarker);
|
||||
assert.strictEqual(data.MaxKeys, 1000);
|
||||
assert.strictEqual(data.Prefix, prefix);
|
||||
|
||||
const contents = data.Contents;
|
||||
assert.strictEqual(contents.length, 2);
|
||||
assert(contents.every(d => d.Key === 'key2'));
|
||||
|
||||
assert.deepStrictEqual(contents.map(v => v.VersionId), expectedKey2VersionIds);
|
||||
|
||||
checkContents(contents);
|
||||
|
||||
return done();
|
||||
});
|
||||
});
|
||||
|
||||
it('should return the noncurrent version with prefix key1, before a defined date, and after marker', done => {
|
||||
const prefix = 'key2';
|
||||
|
||||
makeBackbeatRequest({
|
||||
method: 'GET',
|
||||
bucket: testBucket,
|
||||
queryObj: {
|
||||
'list-type': 'noncurrent',
|
||||
prefix,
|
||||
'before-date': date,
|
||||
'key-marker': 'key2',
|
||||
'version-id-marker': expectedKey2VersionIds[0]
|
||||
},
|
||||
authCredentials: credentials,
|
||||
}, (err, response) => {
|
||||
assert.ifError(err);
|
||||
assert.strictEqual(response.statusCode, 200);
|
||||
const data = JSON.parse(response.body);
|
||||
|
||||
assert.strictEqual(data.IsTruncated, false);
|
||||
assert(!data.NextKeyMarker);
|
||||
assert.strictEqual(data.MaxKeys, 1000);
|
||||
assert.strictEqual(data.Prefix, prefix);
|
||||
|
||||
const contents = data.Contents;
|
||||
assert.strictEqual(contents.length, 1);
|
||||
assert(contents.every(d => d.Key === 'key2'));
|
||||
contents[0].Key = 'key2';
|
||||
contents[0].VersionId = expectedKey2VersionIds[1];
|
||||
|
||||
checkContents(contents);
|
||||
|
||||
return done();
|
||||
});
|
||||
});
|
||||
|
||||
it('should return the current versions before a defined date', done => {
|
||||
makeBackbeatRequest({
|
||||
method: 'GET',
|
||||
bucket: testBucket,
|
||||
queryObj: { 'list-type': 'noncurrent', 'before-date': date },
|
||||
authCredentials: credentials,
|
||||
}, (err, response) => {
|
||||
assert.ifError(err);
|
||||
assert.strictEqual(response.statusCode, 200);
|
||||
const data = JSON.parse(response.body);
|
||||
|
||||
assert.strictEqual(data.IsTruncated, false);
|
||||
assert(!data.NextKeyMarker);
|
||||
assert.strictEqual(data.MaxKeys, 1000);
|
||||
assert.strictEqual(data.BeforeDate, date);
|
||||
|
||||
const contents = data.Contents;
|
||||
assert.strictEqual(contents.length, 4);
|
||||
checkContents(contents);
|
||||
|
||||
const key1Versions = contents.filter(c => c.Key === 'key1');
|
||||
assert.strictEqual(key1Versions.length, 2);
|
||||
|
||||
const key2Versions = contents.filter(c => c.Key === 'key2');
|
||||
assert.strictEqual(key2Versions.length, 2);
|
||||
|
||||
assert.deepStrictEqual(key1Versions.map(v => v.VersionId), expectedKey1VersionIds);
|
||||
assert.deepStrictEqual(key2Versions.map(v => v.VersionId), expectedKey2VersionIds);
|
||||
|
||||
return done();
|
||||
});
|
||||
});
|
||||
|
||||
it('should truncate list of non current versions before a defined date', done => {
|
||||
makeBackbeatRequest({
|
||||
method: 'GET',
|
||||
bucket: testBucket,
|
||||
queryObj: { 'list-type': 'noncurrent', 'before-date': date, 'max-keys': '1' },
|
||||
authCredentials: credentials,
|
||||
}, (err, response) => {
|
||||
assert.ifError(err);
|
||||
assert.strictEqual(response.statusCode, 200);
|
||||
const data = JSON.parse(response.body);
|
||||
|
||||
assert.strictEqual(data.IsTruncated, true);
|
||||
assert.strictEqual(data.NextKeyMarker, 'key1');
|
||||
assert.strictEqual(data.NextVersionIdMarker, expectedKey1VersionIds[0]);
|
||||
assert.strictEqual(data.MaxKeys, 1);
|
||||
assert.strictEqual(data.BeforeDate, date);
|
||||
assert.strictEqual(data.Contents.length, 1);
|
||||
|
||||
const contents = data.Contents;
|
||||
checkContents(contents);
|
||||
assert.strictEqual(contents[0].Key, 'key1');
|
||||
assert.strictEqual(contents[0].VersionId, expectedKey1VersionIds[0]);
|
||||
return done();
|
||||
});
|
||||
});
|
||||
|
||||
it('should return the first following list of current versions before a defined date', done => {
|
||||
makeBackbeatRequest({
|
||||
method: 'GET',
|
||||
bucket: testBucket,
|
||||
queryObj: {
|
||||
'list-type': 'noncurrent',
|
||||
'before-date': date,
|
||||
'max-keys': '1',
|
||||
'key-marker': 'key1',
|
||||
'version-id-marker': expectedKey1VersionIds[0]
|
||||
},
|
||||
authCredentials: credentials,
|
||||
}, (err, response) => {
|
||||
assert.ifError(err);
|
||||
assert.strictEqual(response.statusCode, 200);
|
||||
const data = JSON.parse(response.body);
|
||||
|
||||
assert.strictEqual(data.IsTruncated, true);
|
||||
assert.strictEqual(data.KeyMarker, 'key1');
|
||||
assert.strictEqual(data.VersionIdMarker, expectedKey1VersionIds[0]);
|
||||
assert.strictEqual(data.NextKeyMarker, 'key1');
|
||||
assert.strictEqual(data.NextVersionIdMarker, expectedKey1VersionIds[1]);
|
||||
assert.strictEqual(data.MaxKeys, 1);
|
||||
assert.strictEqual(data.BeforeDate, date);
|
||||
assert.strictEqual(data.Contents.length, 1);
|
||||
|
||||
const contents = data.Contents;
|
||||
checkContents(contents);
|
||||
assert.strictEqual(contents[0].Key, 'key1');
|
||||
assert.strictEqual(contents[0].VersionId, expectedKey1VersionIds[1]);
|
||||
return done();
|
||||
});
|
||||
});
|
||||
|
||||
    it('should return the second following list of noncurrent versions before a defined date', done => {
        makeBackbeatRequest({
            method: 'GET',
            bucket: testBucket,
            queryObj: {
                'list-type': 'noncurrent',
                'before-date': date,
                'max-keys': '1',
                'key-marker': 'key1',
                'version-id-marker': expectedKey1VersionIds[1],
            },
            authCredentials: credentials,
        }, (err, response) => {
            assert.ifError(err);
            assert.strictEqual(response.statusCode, 200);
            const data = JSON.parse(response.body);

            assert.strictEqual(data.IsTruncated, true);
            assert.strictEqual(data.KeyMarker, 'key1');
            assert.strictEqual(data.VersionIdMarker, expectedKey1VersionIds[1]);
            assert.strictEqual(data.NextKeyMarker, 'key2');
            assert.strictEqual(data.NextVersionIdMarker, expectedKey2VersionIds[0]);
            assert.strictEqual(data.MaxKeys, 1);
            assert.strictEqual(data.BeforeDate, date);
            assert.strictEqual(data.Contents.length, 1);

            const contents = data.Contents;
            checkContents(contents);
            assert.strictEqual(contents[0].Key, 'key2');
            assert.strictEqual(contents[0].VersionId, expectedKey2VersionIds[0]);
            return done();
        });
    });

    it('should return the last and third following list of noncurrent versions before a defined date', done => {
        makeBackbeatRequest({
            method: 'GET',
            bucket: testBucket,
            queryObj: {
                'list-type': 'noncurrent',
                'before-date': date,
                'max-keys': '1',
                'key-marker': 'key2',
                'version-id-marker': expectedKey2VersionIds[0],
            },
            authCredentials: credentials,
        }, (err, response) => {
            assert.ifError(err);
            assert.strictEqual(response.statusCode, 200);
            const data = JSON.parse(response.body);

            assert.strictEqual(data.IsTruncated, false);
            assert.strictEqual(data.KeyMarker, 'key2');
            assert.strictEqual(data.VersionIdMarker, expectedKey2VersionIds[0]);
            assert(!data.NextKeyMarker);
            assert(!data.NextVersionIdMarker);
            assert.strictEqual(data.MaxKeys, 1);
            assert.strictEqual(data.BeforeDate, date);
            assert.strictEqual(data.Contents.length, 1);

            const contents = data.Contents;
            checkContents(contents);
            assert.strictEqual(contents[0].Key, 'key2');
            assert.strictEqual(contents[0].VersionId, expectedKey2VersionIds[1]);
            return done();
        });
    });
});
@ -0,0 +1,332 @@
const assert = require('assert');
const async = require('async');
const BucketUtility = require('../aws-node-sdk/lib/utility/bucket-util');
const { removeAllVersions } = require('../aws-node-sdk/lib/utility/versioning-util');
const { makeBackbeatRequest } = require('./utils');

const testBucket = 'bucket-for-list-lifecycle-orphans-tests';
const emptyBucket = 'empty-bucket-for-list-lifecycle-orphans-tests';
const nonVersionedBucket = 'non-versioned-bucket-for-list-lifecycle-orphans-tests';

const credentials = {
    accessKey: 'WLI8X7JGPU1AWQEQIKM5',
    secretKey: '0Src2X+kIrR1SUo/NhR5o1V4hqU1dtlePBHAcCbV',
};

function checkContents(contents) {
    contents.forEach(d => {
        assert(d.Key);
        assert(d.LastModified);
        assert(d.VersionId);
        assert(d.Etag);
        assert(d.Owner.DisplayName);
        assert(d.Owner.ID);
        assert(d.StorageClass);
        assert.strictEqual(d.StorageClass, 'STANDARD');
        assert(!d.TagSet);
        assert.strictEqual(d.IsLatest, true);
        assert.strictEqual(d.DataStoreName, 'us-east-1');
        assert.strictEqual(d.ListType, 'orphan');
        assert.strictEqual(d.Size, 0);
    });
}

// Create an orphan delete marker: put an object, delete it (which creates a
// delete marker), then delete the object version itself so that only the
// delete marker remains.
function createOrphanDeleteMarker(s3, bucketName, keyName, cb) {
    let versionId;
    return async.series([
        next => s3.putObject({ Bucket: bucketName, Key: keyName, Body: '123', Tagging: 'mykey=myvalue' },
            (err, data) => {
                if (err) {
                    return next(err);
                }
                versionId = data.VersionId;
                return next();
            }),
        next => s3.deleteObject({ Bucket: bucketName, Key: keyName }, next),
        next => s3.deleteObject({ Bucket: bucketName, Key: keyName, VersionId: versionId }, next),
    ], cb);
}

describe('listLifecycleOrphans', () => {
    let bucketUtil;
    let s3;
    let date;

    before(done => {
        bucketUtil = new BucketUtility('account1', { signatureVersion: 'v4' });
        s3 = bucketUtil.s3;

        return async.series([
            next => s3.createBucket({ Bucket: testBucket }, next),
            next => s3.createBucket({ Bucket: emptyBucket }, next),
            next => s3.createBucket({ Bucket: nonVersionedBucket }, next),
            next => s3.putBucketVersioning({
                Bucket: testBucket,
                VersioningConfiguration: { Status: 'Enabled' },
            }, next),
            next => s3.putBucketVersioning({
                Bucket: emptyBucket,
                VersioningConfiguration: { Status: 'Enabled' },
            }, next),
            next => async.times(3, (n, cb) => {
                createOrphanDeleteMarker(s3, testBucket, `key${n}old`, cb);
            }, next),
            next => {
                date = new Date(Date.now()).toISOString();
                return async.times(5, (n, cb) => {
                    createOrphanDeleteMarker(s3, testBucket, `key${n}`, cb);
                }, next);
            },
        ], done);
    });

    after(done => async.series([
        next => removeAllVersions({ Bucket: testBucket }, next),
        next => s3.deleteBucket({ Bucket: testBucket }, next),
        next => s3.deleteBucket({ Bucket: emptyBucket }, next),
        next => s3.deleteBucket({ Bucket: nonVersionedBucket }, next),
    ], done));

    it('should return empty list of orphan delete markers if bucket is empty', done => {
        makeBackbeatRequest({
            method: 'GET',
            bucket: emptyBucket,
            queryObj: { 'list-type': 'orphan' },
            authCredentials: credentials,
        }, (err, response) => {
            assert.ifError(err);
            assert.strictEqual(response.statusCode, 200);
            const data = JSON.parse(response.body);

            assert.strictEqual(data.IsTruncated, false);
            assert(!data.NextKeyMarker);
            assert.strictEqual(data.MaxKeys, 1000);
            assert.strictEqual(data.Contents.length, 0);
            return done();
        });
    });

    it('should return empty list of orphan delete markers if prefix does not apply', done => {
        makeBackbeatRequest({
            method: 'GET',
            bucket: testBucket,
            queryObj: { 'list-type': 'orphan', prefix: 'unknown' },
            authCredentials: credentials,
        }, (err, response) => {
            assert.ifError(err);
            assert.strictEqual(response.statusCode, 200);
            const data = JSON.parse(response.body);

            assert.strictEqual(data.IsTruncated, false);
            assert(!data.NextKeyMarker);
            assert.strictEqual(data.MaxKeys, 1000);
            assert.strictEqual(data.Contents.length, 0);
            return done();
        });
    });

    it('should return InvalidArgument error if max-keys is invalid', done => {
        makeBackbeatRequest({
            method: 'GET',
            bucket: testBucket,
            queryObj: { 'list-type': 'orphan', 'max-keys': 'a' },
            authCredentials: credentials,
        }, err => {
            assert.strictEqual(err.code, 'InvalidArgument');
            return done();
        });
    });

    it('should return error if bucket does not exist', done => {
        makeBackbeatRequest({
            method: 'GET',
            bucket: 'idonotexist',
            queryObj: { 'list-type': 'orphan' },
            authCredentials: credentials,
        }, err => {
            assert.strictEqual(err.code, 'NoSuchBucket');
            return done();
        });
    });

    it('should return all the orphan delete markers', done => {
        makeBackbeatRequest({
            method: 'GET',
            bucket: testBucket,
            queryObj: { 'list-type': 'orphan' },
            authCredentials: credentials,
        }, (err, response) => {
            assert.ifError(err);
            assert.strictEqual(response.statusCode, 200);
            const data = JSON.parse(response.body);

            assert.strictEqual(data.IsTruncated, false);
            assert(!data.NextKeyMarker);
            assert.strictEqual(data.MaxKeys, 1000);

            const contents = data.Contents;
            assert.strictEqual(contents.length, 8);
            checkContents(contents);

            return done();
        });
    });

    it('should return all the orphan delete markers with prefix key1', done => {
        const prefix = 'key1';

        makeBackbeatRequest({
            method: 'GET',
            bucket: testBucket,
            queryObj: { 'list-type': 'orphan', prefix },
            authCredentials: credentials,
        }, (err, response) => {
            assert.ifError(err);
            assert.strictEqual(response.statusCode, 200);
            const data = JSON.parse(response.body);

            assert.strictEqual(data.IsTruncated, false);
            assert(!data.NextKeyMarker);
            assert.strictEqual(data.MaxKeys, 1000);
            assert.strictEqual(data.Prefix, prefix);

            const contents = data.Contents;
            assert.strictEqual(contents.length, 2);
            checkContents(contents);

            assert.strictEqual(contents[0].Key, 'key1');
            assert.strictEqual(contents[1].Key, 'key1old');

            return done();
        });
    });

    it('should return the orphan delete markers before a defined date', done => {
        makeBackbeatRequest({
            method: 'GET',
            bucket: testBucket,
            queryObj: {
                'list-type': 'orphan',
                'before-date': date,
            },
            authCredentials: credentials,
        }, (err, response) => {
            assert.ifError(err);
            assert.strictEqual(response.statusCode, 200);
            const data = JSON.parse(response.body);

            assert.strictEqual(data.IsTruncated, false);
            assert(!data.NextKeyMarker);
            assert.strictEqual(data.MaxKeys, 1000);
            assert.strictEqual(data.Contents.length, 3);
            assert.strictEqual(data.BeforeDate, date);

            const contents = data.Contents;
            checkContents(contents);
            assert.strictEqual(contents[0].Key, 'key0old');
            assert.strictEqual(contents[1].Key, 'key1old');
            assert.strictEqual(contents[2].Key, 'key2old');
            return done();
        });
    });

    it('should truncate list of orphan delete markers before a defined date', done => {
        makeBackbeatRequest({
            method: 'GET',
            bucket: testBucket,
            queryObj: {
                'list-type': 'orphan',
                'before-date': date,
                'max-keys': '1',
            },
            authCredentials: credentials,
        }, (err, response) => {
            assert.ifError(err);
            assert.strictEqual(response.statusCode, 200);
            const data = JSON.parse(response.body);

            assert.strictEqual(data.IsTruncated, true);
            assert.strictEqual(data.NextKeyMarker, 'key0old');
            assert.strictEqual(data.MaxKeys, 1);
            assert.strictEqual(data.BeforeDate, date);
            assert.strictEqual(data.Contents.length, 1);

            const contents = data.Contents;
            checkContents(contents);
            assert.strictEqual(contents[0].Key, 'key0old');
            return done();
        });
    });

    it('should return the second truncated list of orphan delete markers before a defined date', done => {
        makeBackbeatRequest({
            method: 'GET',
            bucket: testBucket,
            queryObj: { 'list-type': 'orphan', 'before-date': date, 'max-keys': '1', 'key-marker': 'key0old' },
            authCredentials: credentials,
        }, (err, response) => {
            assert.ifError(err);
            assert.strictEqual(response.statusCode, 200);
            const data = JSON.parse(response.body);

            assert.strictEqual(data.IsTruncated, true);
            assert.strictEqual(data.KeyMarker, 'key0old');
            assert.strictEqual(data.NextKeyMarker, 'key1old');
            assert.strictEqual(data.MaxKeys, 1);
            assert.strictEqual(data.Contents.length, 1);

            const contents = data.Contents;
            checkContents(contents);
            assert.strictEqual(contents[0].Key, 'key1old');
            assert.strictEqual(data.BeforeDate, date);
            return done();
        });
    });

    it('should return the third truncated list of orphan delete markers before a defined date', done => {
        makeBackbeatRequest({
            method: 'GET',
            bucket: testBucket,
            queryObj: { 'list-type': 'orphan', 'before-date': date, 'max-keys': '1', 'key-marker': 'key1old' },
            authCredentials: credentials,
        }, (err, response) => {
            assert.ifError(err);
            assert.strictEqual(response.statusCode, 200);
            const data = JSON.parse(response.body);

            assert.strictEqual(data.IsTruncated, true);
            assert.strictEqual(data.MaxKeys, 1);
            assert.strictEqual(data.KeyMarker, 'key1old');
            assert.strictEqual(data.BeforeDate, date);
            assert.strictEqual(data.NextKeyMarker, 'key2old');

            const contents = data.Contents;
            assert.strictEqual(contents.length, 1);
            checkContents(contents);
            assert.strictEqual(contents[0].Key, 'key2old');
            return done();
        });
    });

    it('should return the fourth and last truncated list of orphan delete markers before a defined date', done => {
        makeBackbeatRequest({
            method: 'GET',
            bucket: testBucket,
            queryObj: { 'list-type': 'orphan', 'before-date': date, 'max-keys': '1', 'key-marker': 'key2old' },
            authCredentials: credentials,
        }, (err, response) => {
            assert.ifError(err);
            assert.strictEqual(response.statusCode, 200);
            const data = JSON.parse(response.body);

            assert.strictEqual(data.IsTruncated, false);
            assert.strictEqual(data.MaxKeys, 1);
            assert.strictEqual(data.KeyMarker, 'key2old');
            assert.strictEqual(data.BeforeDate, date);

            const contents = data.Contents;
            assert.strictEqual(contents.length, 0);
            return done();
        });
    });
});
@ -0,0 +1,36 @@
const { makeRequest } = require('../raw-node/utils/makeRequest');

const ipAddress = process.env.IP ? process.env.IP : '127.0.0.1';

/** makeBackbeatRequest - utility function to generate a request going
 * through the backbeat route
 * @param {object} params - params for making request
 * @param {string} params.method - request method
 * @param {string} params.bucket - bucket name
 * @param {object} [params.queryObj] - query parameters and their string values
 * @param {object} [params.headers] - headers and their string values
 * @param {object} [params.authCredentials] - authentication credentials
 * @param {string} params.authCredentials.accessKey - access key
 * @param {string} params.authCredentials.secretKey - secret key
 * @param {function} callback - with error and response parameters
 * @return {undefined} - and call callback
 */
function makeBackbeatRequest(params, callback) {
    const { method, headers, bucket, authCredentials, queryObj } = params;
    const options = {
        hostname: ipAddress,
        port: 8000,
        method,
        headers,
        authCredentials,
        path: `/_/backbeat/lifecycle/${bucket}`,
        jsonResponse: true,
        queryObj,
    };
    makeRequest(options, callback);
}

module.exports = {
    makeBackbeatRequest,
};
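For reference, a minimal sketch of how this helper can drive the paginated lifecycle listing exercised by the tests above. It assumes it sits next to the test helpers (so `./utils` resolves); the bucket name, credentials, and the `listAllOrphans` wrapper are illustrative only, while the query parameters (`list-type`, `before-date`, `max-keys`, `key-marker`) and response fields (`IsTruncated`, `NextKeyMarker`, `Contents`) are the ones used in this change.

const { makeBackbeatRequest } = require('./utils');

// Hypothetical credentials and bucket, for illustration only.
const exampleCredentials = { accessKey: 'accessKey1', secretKey: 'verySecretKey1' };
const exampleBucket = 'example-bucket';

// Collect every orphan delete marker older than `beforeDate`, following
// NextKeyMarker until the listing is no longer truncated.
function listAllOrphans(beforeDate, keyMarker, acc, cb) {
    const queryObj = {
        'list-type': 'orphan',
        'before-date': beforeDate,
        'max-keys': '1000',
    };
    if (keyMarker) {
        queryObj['key-marker'] = keyMarker;
    }
    return makeBackbeatRequest({
        method: 'GET',
        bucket: exampleBucket,
        queryObj,
        authCredentials: exampleCredentials,
    }, (err, response) => {
        if (err) {
            return cb(err);
        }
        const data = JSON.parse(response.body);
        const all = acc.concat(data.Contents);
        if (data.IsTruncated) {
            // Resume from the marker returned by the previous page.
            return listAllOrphans(beforeDate, data.NextKeyMarker, all, cb);
        }
        return cb(null, all);
    });
}

// Usage: list every orphan delete marker created before "now".
listAllOrphans(new Date().toISOString(), null, [], (err, orphans) => {
    if (err) {
        throw err;
    }
    orphans.forEach(o => console.log(o.Key, o.LastModified));
});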
@ -732,9 +732,9 @@ arraybuffer.slice@~0.0.7:
  optionalDependencies:
    ioctl "^2.0.2"

"arsenal@git+https://github.com/scality/arsenal#8.1.84":
  version "8.1.84"
  resolved "git+https://github.com/scality/arsenal#22fa04b7e7ac0f5e6ec54773aed2aba1363ed16e"
"arsenal@git+https://github.com/scality/Arsenal#feature/ARSN-312/listLifecycleOrphan":
  version "8.1.85"
  resolved "git+https://github.com/scality/Arsenal#71935daefbefd83a6018566381cc2b849f57bfff"
  dependencies:
    "@azure/identity" "^3.1.1"
    "@azure/storage-blob" "^12.12.0"