Compare commits

...

25 Commits

Author SHA1 Message Date
Vinh Tao 42c8a7d989 test: general versioning 2017-03-21 12:58:32 +01:00
Vinh Tao fc3926b515 test: fixes and clean-up 2017-03-21 12:58:32 +01:00
Vinh Tao 5b08c13739 test: update completeMPU 2017-03-21 12:57:02 +01:00
Vinh Tao 8c0dfd9329 test: objectCopy 2017-03-21 12:57:02 +01:00
Vinh Tao de0bedc436 test: objectACL [wip, skipped] 2017-03-21 12:57:02 +01:00
Vinh Tao b374c321c9 test: multiOjectDelete 2017-03-21 12:53:15 +01:00
Vinh Tao 7ed5e8f3c8 test: list object versions 2017-03-21 12:53:15 +01:00
Vinh Tao 4cdcd34ffa test: basic functional tests 2017-03-21 12:53:15 +01:00
Vinh Tao 9f450c4530 test: update existing unit tests 2017-03-21 12:53:14 +01:00
Vinh Tao dff694df24 ft: versioning bucketDelete 2017-03-21 12:53:14 +01:00
Vinh Tao ac74957a70 ft: versioning multiObjectDelete 2017-03-21 12:19:41 +01:00
Vinh Tao b71d22eb51 ft: versioning objectPutCopyPart 2017-03-21 12:19:41 +01:00
Vinh Tao a16a1f3dac ft: versioning objectCopy 2017-03-21 12:19:41 +01:00
Vinh Tao 50afdafa52 ft: versioning objectGetACL 2017-03-21 12:19:41 +01:00
Vinh Tao a9f4559e17 ft: versioning objectPutACL 2017-03-21 12:19:41 +01:00
Vinh Tao 80b33222db ft: versioning listMultipartUploads 2017-03-21 12:19:41 +01:00
Vinh Tao 306ca598c5 ft: versioning completeMultipartUpload 2017-03-21 12:19:41 +01:00
Vinh Tao 671506fb9f ft: versioning objectHead 2017-03-21 12:19:41 +01:00
Vinh Tao 1986aa23ac ft: versioning bucketGet 2017-03-21 12:19:41 +01:00
Vinh Tao bbdce5d750 ft: versioning objectGet api 2017-03-21 12:19:41 +01:00
Vinh Tao 7f69be387b ft: versioning objectDelete api 2017-03-21 12:19:41 +01:00
Vinh Tao 53512eb359 ft: versioning for objectPut api 2017-03-21 12:19:41 +01:00
Vinh Tao 61a3dabe25 ft: versioning for metadata local backends 2017-03-21 12:19:41 +01:00
Vinh Tao b2210d3704 ft: versioning tools 2017-03-21 12:19:41 +01:00
Vinh Tao 259ca69e93 clean: some optimization and dev dependencies 2017-03-21 12:19:41 +01:00
52 changed files with 5292 additions and 773 deletions

View File

@ -91,7 +91,7 @@ test:
- S3BACKEND=file S3VAULT=mem MPU_TESTING=yes npm start
> $CIRCLE_ARTIFACTS/server_file_awssdk.txt
& bash wait_for_local_port.bash 8000 40
&& npm run ft_awssdk
&& VERSIONING=no npm run ft_awssdk
- S3BACKEND=file S3VAULT=mem npm start
> $CIRCLE_ARTIFACTS/server_file_s3cmd.txt
& bash wait_for_local_port.bash 8000 40
@ -109,7 +109,7 @@ test:
- S3BACKEND=file S3VAULT=mem MPU_TESTING=yes npm start
> $CIRCLE_ARTIFACTS/server_file_kms_awssdk.txt
& bash wait_for_local_port.bash 8000 40
&& ENABLE_KMS_ENCRYPTION=true npm run ft_awssdk
&& VERSIONING=no ENABLE_KMS_ENCRYPTION=true npm run ft_awssdk
- S3BACKEND=file S3VAULT=mem npm start
> $CIRCLE_ARTIFACTS/server_file_kms_s3cmd.txt
& bash wait_for_local_port.bash 8000 40

View File

@ -68,21 +68,20 @@ export default {
.update('', 'binary').digest('hex'),
// Queries supported by AWS that we do not currently support.
unsupportedQueries: [
'accelerate',
'analytics',
'inventory',
'lifecycle',
'list-type',
'logging',
'metrics',
'notification',
'policy',
'replication',
'requestPayment',
'restore',
'tagging',
'torrent',
'versions',
],
unsupportedQueries: {
'accelerate': true,
'analytics': true,
'inventory': true,
'lifecycle': true,
'list-type': true,
'logging': true,
'metrics': true,
'notification': true,
'policy': true,
'replication': true,
'requestPayment': true,
'restore': true,
'tagging': true,
'torrent': true,
},
};

View File

@ -1,6 +1,7 @@
import url from 'url';
import querystring from 'querystring';
import { auth, errors } from 'arsenal';
import { auth, errors, versioning } from 'arsenal';
import bucketDelete from './bucketDelete';
import bucketDeleteCors from './bucketDeleteCors';
@ -29,7 +30,7 @@ import objectDelete from './objectDelete';
import objectGet from './objectGet';
import objectGetACL from './objectGetACL';
import objectHead from './objectHead';
import objectPut from './objectPut';
import { objectPut } from './objectPut';
import objectPutACL from './objectPutACL';
import objectPutPart from './objectPutPart';
import objectPutCopyPart from './objectPutCopyPart';
@ -40,6 +41,8 @@ import vault from '../auth/vault';
import websiteGet from './websiteGet';
import websiteHead from './websiteHead';
const VID = versioning.VersionID;
auth.setHandler(vault);
const api = {
@ -52,9 +55,11 @@ const api = {
}
let sourceBucket;
let sourceObject;
let sourceVersionId = undefined;
if (apiMethod === 'objectCopy' || apiMethod === 'objectPutCopyPart') {
let source =
querystring.unescape(request.headers['x-amz-copy-source']);
const { pathname, query } =
url.parse(request.headers['x-amz-copy-source']);
let source = querystring.unescape(pathname);
// If client sends the source bucket/object with a leading /,
// remove it
if (source[0] === '/') {
@ -67,6 +72,17 @@ const api = {
// Pull the source bucket and source object separated by /
sourceBucket = source.slice(0, slashSeparator);
sourceObject = source.slice(slashSeparator + 1);
sourceVersionId = query ?
querystring.parse(query).versionId : undefined;
sourceVersionId = sourceVersionId || undefined;
if (sourceVersionId) {
try {
sourceVersionId = VID.decrypt(sourceVersionId);
} catch (exception) {
return callback(errors.InvalidArgument
.customizeDescription('Invalid version id specified'));
}
}
}
const requestContexts = prepareRequestContexts(apiMethod,
request, locationConstraint, sourceBucket, sourceObject);
@ -91,7 +107,7 @@ const api = {
if (apiMethod === 'objectCopy' ||
apiMethod === 'objectPutCopyPart') {
return this[apiMethod](userInfo, request, sourceBucket,
sourceObject, log, callback);
sourceObject, sourceVersionId, log, callback);
}
if (apiMethod === 'objectPut' || apiMethod === 'objectPutPart') {
return this[apiMethod](userInfo, request, streamingV4Params,

View File

@ -111,13 +111,17 @@ export function deleteBucket(bucketMD, bucketName, canonicalID, log, cb) {
return async.waterfall([
function checkForObjectsStep(next) {
return metadata.listObject(bucketName, { maxKeys: 1 }, log,
(err, objectsListRes) => {
const params = { maxKeys: 1, listingType: 'DelimiterVersions' };
return metadata.listObject(bucketName, params, log,
(err, list) => {
if (err) {
log.error('error from metadata', { error: err });
return next(err);
}
if (objectsListRes.Contents.length) {
const length = (list.Versions ? list.Versions.length : 0) +
(list.DeleteMarkers ? list.DeleteMarkers.length : 0);
log.debug('listing result', { length });
if (length) {
log.debug('bucket delete failed',
{ error: errors.BucketNotEmpty });
return next(errors.BucketNotEmpty);

View File

@ -5,7 +5,9 @@ import services from '../services';
import collectCorsHeaders from '../utilities/collectCorsHeaders';
import escapeForXML from '../utilities/escapeForXML';
import { pushMetric } from '../utapi/utilities';
import { errors } from 'arsenal';
import { errors, versioning } from 'arsenal';
const VID = versioning.VersionID;
// Sample XML response:
/* <ListBucketResult xmlns="http://s3.amazonaws.com/doc/2006-03-01/">
@ -31,6 +33,126 @@ import { errors } from 'arsenal';
</CommonPrefixes>
</ListBucketResult>*/
/**
 * Build the XML body for a versioned listing (GET Bucket ?versions).
 * @param {string} bucketName - name of the listed bucket
 * @param {object} listParams - listing parameters from the request
 *   (prefix, keyMarker, versionIdMarker, maxKeys, delimiter, encoding)
 * @param {object} list - listing result from metadata: Versions,
 *   CommonPrefixes, IsTruncated, NextKeyMarker, NextVersionIdMarker
 * @return {string} the serialized ListVersionsResult XML document
 */
function processVersions(bucketName, listParams, list) {
    const parts = [
        '<?xml version="1.0" encoding="UTF-8"?>',
        '<ListVersionsResult xmlns="http://s3.amazonaws.com/doc/2006-03-01/">',
        '<Name>', bucketName, '</Name>',
    ];
    // honor the requested encoding-type for keys and parameters
    const escapeXmlFn = listParams.encoding === 'url' ?
        querystring.escape : escapeForXML;
    const tagValues = [
        { tag: 'Prefix', value: listParams.prefix },
        { tag: 'KeyMarker', value: listParams.keyMarker },
        { tag: 'VersionIdMarker', value: listParams.versionIdMarker },
        { tag: 'NextKeyMarker', value: list.NextKeyMarker },
        { tag: 'NextVersionIdMarker', value: list.NextVersionIdMarker },
        { tag: 'MaxKeys', value: listParams.maxKeys },
        { tag: 'Delimiter', value: listParams.delimiter },
        { tag: 'EncodingType', value: listParams.encoding },
        { tag: 'IsTruncated', value: list.IsTruncated ? 'true' : 'false' },
    ];
    for (const { tag, value } of tagValues) {
        if (value) {
            // only the internal next-version-id marker is exposed in its
            // encrypted form; the literal 'null' version id passes through
            const emitted = (tag === 'NextVersionIdMarker' &&
                value !== 'null') ? VID.encrypt(value) : value;
            parts.push(`<${tag}>${escapeXmlFn(emitted)}</${tag}>`);
        }
    }
    // a version is "latest" when its key differs from the previous entry's
    let previousKey = listParams.keyMarker;
    for (const entry of list.Versions) {
        const md = JSON.parse(entry.value);
        const key = escapeXmlFn(entry.key);
        const isLatest = previousKey !== key;
        previousKey = key;
        parts.push(
            md.isDeleteMarker ? '<DeleteMarker>' : '<Version>',
            `<Key>${key}</Key>`,
            '<VersionId>',
            (md.isNull || md.versionId === undefined) ?
                'null' : VID.encrypt(md.versionId),
            '</VersionId>',
            `<IsLatest>${isLatest}</IsLatest>`,
            `<LastModified>${md['last-modified']}</LastModified>`,
            `<ETag>&quot;${md['content-md5']}&quot;</ETag>`,
            `<Size>${md['content-length']}</Size>`,
            '<Owner>',
            `<ID>${md['owner-id']}</ID>`,
            `<DisplayName>${md['owner-display-name']}</DisplayName>`,
            '</Owner>',
            `<StorageClass>${md['x-amz-storage-class']}</StorageClass>`,
            md.isDeleteMarker ? '</DeleteMarker>' : '</Version>');
    }
    for (const prefix of list.CommonPrefixes) {
        const escaped = escapeXmlFn(prefix);
        parts.push(
            `<CommonPrefixes><Prefix>${escaped}</Prefix></CommonPrefixes>`);
    }
    parts.push('</ListVersionsResult>');
    return parts.join('');
}
/**
 * Build the XML body for a master-version listing (regular GET Bucket on a
 * versioned bucket): only the latest version of each key is shown and
 * delete markers are hidden.
 * @param {string} bucketName - name of the listed bucket
 * @param {object} listParams - listing parameters from the request
 *   (prefix, marker, maxKeys, delimiter, encoding)
 * @param {object} list - listing result from metadata: Contents,
 *   CommonPrefixes, IsTruncated, NextMarker
 * @return {string} the serialized ListBucketResult XML document
 */
function processMasterVersions(bucketName, listParams, list) {
    const parts = [
        '<?xml version="1.0" encoding="UTF-8"?>',
        '<ListBucketResult xmlns="http://s3.amazonaws.com/doc/2006-03-01/">',
        '<Name>', bucketName, '</Name>',
    ];
    // honor the requested encoding-type for keys and parameters
    const escapeXmlFn = listParams.encoding === 'url' ?
        querystring.escape : escapeForXML;
    // tags omitted entirely when empty; every other empty tag is emitted
    // self-closing (e.g. <Prefix/>) to match AWS responses
    const omitWhenEmpty = ['NextMarker', 'EncodingType', 'Delimiter'];
    const tagValues = [
        { tag: 'Prefix', value: listParams.prefix || '' },
        { tag: 'Marker', value: listParams.marker || '' },
        { tag: 'NextMarker', value: list.NextMarker },
        { tag: 'MaxKeys', value: listParams.maxKeys },
        { tag: 'Delimiter', value: listParams.delimiter },
        { tag: 'EncodingType', value: listParams.encoding },
        { tag: 'IsTruncated', value: list.IsTruncated ? 'true' : 'false' },
    ];
    for (const { tag, value } of tagValues) {
        if (value) {
            parts.push(`<${tag}>${escapeXmlFn(value)}</${tag}>`);
        } else if (omitWhenEmpty.indexOf(tag) === -1) {
            parts.push(`<${tag}/>`);
        }
    }
    for (const entry of list.Contents) {
        const md = entry.value;
        // a delete marker as the master version means the key is "deleted":
        // it must not appear in a non-versioned listing
        if (!md.isDeleteMarker) {
            parts.push(
                '<Contents>',
                `<Key>${escapeXmlFn(entry.key)}</Key>`,
                `<LastModified>${md.LastModified}</LastModified>`,
                `<ETag>&quot;${md.ETag}&quot;</ETag>`,
                `<Size>${md.Size}</Size>`,
                '<Owner>',
                `<ID>${md.Owner.ID}</ID>`,
                `<DisplayName>${md.Owner.DisplayName}</DisplayName>`,
                '</Owner>',
                `<StorageClass>${md.StorageClass}</StorageClass>`,
                '</Contents>');
        }
    }
    for (const prefix of list.CommonPrefixes) {
        const escaped = escapeXmlFn(prefix);
        parts.push(
            `<CommonPrefixes><Prefix>${escaped}</Prefix></CommonPrefixes>`);
    }
    parts.push('</ListBucketResult>');
    return parts.join('');
}
/**
* bucketGet - Return list of objects in bucket
* @param {AuthInfo} authInfo - Instance of AuthInfo class with
@ -50,7 +172,6 @@ export default function bucketGet(authInfo, request, log, callback) {
return callback(errors.InvalidArgument.customizeDescription('Invalid ' +
'Encoding Method specified in Request'));
}
const escapeXmlFn = encoding === 'url' ? querystring.escape : escapeForXML;
const requestMaxKeys = params['max-keys'] ?
Number.parseInt(params['max-keys'], 10) : 1000;
if (Number.isNaN(requestMaxKeys) || requestMaxKeys < 0) {
@ -68,6 +189,7 @@ export default function bucketGet(authInfo, request, log, callback) {
log,
};
const listParams = {
listingType: 'Delimiter',
maxKeys: actualMaxKeys,
delimiter: params.delimiter,
marker: params.marker,
@ -81,70 +203,31 @@ export default function bucketGet(authInfo, request, log, callback) {
log.debug('error processing request', { error: err });
return callback(err, null, corsHeaders);
}
if (params.versions !== undefined) {
listParams.listingType = 'DelimiterVersions';
delete listParams.marker;
listParams.keyMarker = params['key-marker'];
listParams.versionIdMarker = params['version-id-marker'] ?
VID.decrypt(params['version-id-marker']) : undefined;
} else {
listParams.listingType = 'DelimiterMaster';
}
return services.getObjectListing(bucketName, listParams, log,
(err, list) => {
if (err) {
log.debug('error processing request', { error: err });
return callback(err, null, corsHeaders);
}
const xml = [];
xml.push(
'<?xml version="1.0" encoding="UTF-8"?>',
'<ListBucketResult xmlns="http://s3.amazonaws.com/doc/' +
'2006-03-01/">',
`<Name>${bucketName}</Name>`
);
const isTruncated = list.IsTruncated ? 'true' : 'false';
const xmlParams = [
{ tag: 'Prefix', value: listParams.prefix },
{ tag: 'NextMarker', value: list.NextMarker },
{ tag: 'Marker', value: listParams.marker },
{ tag: 'MaxKeys', value: requestMaxKeys },
{ tag: 'Delimiter', value: listParams.delimiter },
{ tag: 'EncodingType', value: encoding },
{ tag: 'IsTruncated', value: isTruncated },
];
xmlParams.forEach(p => {
if (p.value) {
xml.push(`<${p.tag}>${escapeXmlFn(p.value)}</${p.tag}>`);
} else if (p.tag !== 'NextMarker' &&
p.tag !== 'EncodingType' &&
p.tag !== 'Delimiter') {
xml.push(`<${p.tag}/>`);
}
});
list.Contents.forEach(item => {
const v = item.value;
const objectKey = escapeXmlFn(item.key);
xml.push(
'<Contents>',
`<Key>${objectKey}</Key>`,
`<LastModified>${v.LastModified}</LastModified>`,
`<ETag>&quot;${v.ETag}&quot;</ETag>`,
`<Size>${v.Size}</Size>`,
'<Owner>',
`<ID>${v.Owner.ID}</ID>`,
`<DisplayName>${v.Owner.DisplayName}</DisplayName>`,
'</Owner>',
`<StorageClass>${v.StorageClass}</StorageClass>`,
'</Contents>'
);
});
list.CommonPrefixes.forEach(item => {
const val = escapeXmlFn(item);
xml.push(
`<CommonPrefixes><Prefix>${val}</Prefix></CommonPrefixes>`
);
});
xml.push('</ListBucketResult>');
pushMetric('listBucket', log, {
authInfo,
bucket: bucketName,
});
return callback(null, xml.join(''), corsHeaders);
listParams.maxKeys = requestMaxKeys;
listParams.encoding = encoding;
let res = undefined;
if (listParams.listingType === 'DelimiterVersions') {
res = processVersions(bucketName, listParams, list);
} else {
res = processMasterVersions(bucketName, listParams, list);
}
pushMetric('listBucket', log, { authInfo, bucket: bucketName });
return callback(null, res, corsHeaders);
});
});
return undefined;

View File

@ -1,4 +1,4 @@
import { errors } from 'arsenal';
import { errors, versioning } from 'arsenal';
import async from 'async';
import crypto from 'crypto';
import { parseString } from 'xml2js';
@ -13,6 +13,8 @@ import services from '../services';
import { logger } from '../utilities/logger';
const VID = versioning.VersionID;
/*
Format of xml request:
<CompleteMultipartUpload>
@ -84,6 +86,9 @@ function completeMultipartUpload(authInfo, request, log, callback) {
log.debug('processing request', { method: 'completeMultipartUpload' });
const bucketName = request.bucketName;
const objectKey = request.objectKey;
let generatedVersionId = undefined;
let bucketMD = undefined;
let objectMD = undefined;
const hostname = request.parsedHost;
const uploadId = request.query.uploadId;
const metadataValParams = {
@ -127,6 +132,8 @@ function completeMultipartUpload(authInfo, request, log, callback) {
services.metadataValidateAuthorization(metadataValParams, next);
},
function waterfall2(destBucket, objMD, next) {
bucketMD = destBucket;
objectMD = objMD;
services.metadataValidateMultipart(metadataValParams,
(err, mpuBucket) => {
if (err) {
@ -380,22 +387,43 @@ function completeMultipartUpload(authInfo, request, log, callback) {
masterKeyId: destinationBucket.getSseMasterKeyId(),
};
}
services.metadataStoreObject(destinationBucket.getName(),
dataLocations, pseudoCipherBundle, metaStoreParams, err => {
if (err) {
return next(err, destinationBucket);
}
if (objMD && objMD.location) {
const dataToDelete = Array.isArray(objMD.location) ?
objMD.location : [objMD.location];
data.batchDelete(dataToDelete, logger
.newRequestLoggerFromSerializedUids(log
.getSerializedUids()));
}
return next(null, mpuBucket, mpuOverviewKey,
aggregateETag, storedPartsAsObjects,
extraPartLocations, destinationBucket);
});
let versioningOptions = undefined;
async.waterfall([
callback => services.versioningPreprocessing(bucketName,
bucketMD, objectKey, objectMD, null, log, callback),
(options, callback) => {
versioningOptions = options;
// eslint-disable-next-line
metaStoreParams.versionId = options.versionId;
// eslint-disable-next-line
metaStoreParams.versioning = options.versioning;
// eslint-disable-next-line
metaStoreParams.isNull = options.isNull;
// eslint-disable-next-line
metaStoreParams.nullVersionId = options.nullVersionId;
services.metadataStoreObject(
destinationBucket.getName(), dataLocations,
pseudoCipherBundle, metaStoreParams, callback);
},
], (err, res) => {
// TODO to include the versioning information in result
if (err) {
return next(err, destinationBucket);
}
if (res && res.versionId) {
generatedVersionId = res.versionId;
}
if (objMD && objMD.location && versioningOptions.deleteData) {
const dataToDelete = Array.isArray(objMD.location) ?
objMD.location : [objMD.location];
data.batchDelete(dataToDelete, logger
.newRequestLoggerFromSerializedUids(log
.getSerializedUids()));
}
return next(null, mpuBucket, mpuOverviewKey,
aggregateETag, storedPartsAsObjects,
extraPartLocations, destinationBucket);
});
},
function waterfall8(mpuBucket, mpuOverviewKey, aggregateETag,
storedPartsAsObjects, extraPartLocations, destinationBucket, next) {
@ -417,6 +445,9 @@ function completeMultipartUpload(authInfo, request, log, callback) {
if (err) {
return callback(err, null, corsHeaders);
}
if (generatedVersionId) {
corsHeaders['x-amz-version-id'] = VID.encrypt(generatedVersionId);
}
xmlParams.ETag = `"${aggregateETag}"`;
const xml = _convertToXml(xmlParams);
pushMetric('completeMultipartUpload', log, {

View File

@ -213,7 +213,7 @@ export default function listMultipartUploads(authInfo,
maxKeys: maxUploads,
prefix: `overview${splitter}${prefix}`,
queryPrefixLength: prefix.length,
listingType: 'multipartuploads',
listingType: 'MPU',
splitter,
};
services.getMultipartUploadListing(mpuBucketName, listingParams,

View File

@ -1,7 +1,7 @@
import crypto from 'crypto';
import async from 'async';
import { auth, errors } from 'arsenal';
import { auth, errors, versioning } from 'arsenal';
import { parseString } from 'xml2js';
import escapeForXML from '../utilities/escapeForXML';
@ -12,6 +12,9 @@ import metadata from '../metadata/wrapper';
import services from '../services';
import vault from '../auth/vault';
import { isBucketAuthorized } from './apiUtils/authorization/aclChecks';
import { createAndStoreObject } from './objectPut';
const VID = versioning.VersionID;
/*
@ -50,8 +53,12 @@ import { isBucketAuthorized } from './apiUtils/authorization/aclChecks';
* @param {boolean} quietSetting - true if xml should just include error list
* and false if should include deleted list and error list
* @param {object []} errorResults - list of error result objects with each
* object containing -- key: objectName, error: arsenal error
* @param {string []} deleted - list of object keys deleted
* object containing -- entry: { key, versionId }, error: arsenal error
* @param {object []} deleted - list of object deleted, an object has the format
* object: { entry, result, isDeletingDeleteMarker }
* object.entry : above
* object.result: stringification of { versionId }
* object.isDeletingDeleteMarker: name as comment
* @return {string} xml string
*/
function _formatXML(quietSetting, errorResults, deleted) {
@ -59,9 +66,14 @@ function _formatXML(quietSetting, errorResults, deleted) {
errorResults.forEach(errorObj => {
errorXML.push(
'<Error>',
'<Key>', escapeForXML(errorObj.key), '</Key>',
'<Code>', errorObj.error.message, '</Code>',
'<Message>', errorObj.error.description, '</Message>',
'<Key>', escapeForXML(errorObj.entry.key), '</Key>',
'<Code>', errorObj.error.message, '</Code>');
if (errorObj.entry.versionId) {
const version = errorObj.entry.versionId === 'null' ?
'null' : escapeForXML(errorObj.entry.versionId);
errorXML.push('<VersionId>', version, '</VersionId>');
}
errorXML.push('<Message>', errorObj.error.description, '</Message>',
'</Error>'
);
});
@ -79,12 +91,39 @@ function _formatXML(quietSetting, errorResults, deleted) {
return xml.join('');
}
const deletedXML = [];
deleted.forEach(objKey => {
deleted.forEach(version => {
// TODO include isDeletingDeleteMarker in the result
const isDeleteMarker = !!version.result;
const isDeletingDeleteMarker = version.isDeletingDeleteMarker;
deletedXML.push(
'<Deleted>',
'<Key>', escapeForXML(objKey), '</Key>',
'</Deleted>'
'<Key>',
escapeForXML(version.entry.key),
'</Key>'
);
if (version.entry.versionId) {
deletedXML.push(
'<VersionId>',
version.entry.versionId === 'null' ?
'null' : VID.encrypt(escapeForXML(version.entry.versionId)),
'</VersionId>'
);
}
if (isDeleteMarker) {
deletedXML.push(
'<DeleteMarker>',
isDeleteMarker,
'</DeleteMarker>'
);
}
if (isDeletingDeleteMarker) {
deletedXML.push(
'<DeleteMarkerVersionId>',
isDeletingDeleteMarker,
'</DeleteMarkerVersionId>'
);
}
deletedXML.push('</Deleted>');
});
xml[2] = deletedXML.join('');
return xml.join('');
@ -92,21 +131,54 @@ function _formatXML(quietSetting, errorResults, deleted) {
/**
 * Parse a Multi-Object Delete request body.
 *
 * Expected xml2js output shape:
 * {"Object":[
 *      {"Key":["test1"],"VersionId":["vid"]},
 *      {"Key":["test2"]}
 * ]}
 *
 * NOTE(review): this span of the diff capture interleaved the removed
 * implementation with the added one (duplicate `const objects`
 * declarations and a stale early `return`); this is the reconstructed
 * versioning-aware implementation. The per-item error entries now carry
 * the string values (`item.Key[0]` / `item.VersionId[0]`) rather than the
 * xml2js single-element arrays, matching what the XML formatter expects.
 *
 * @param {string} xmlToParse - raw request body
 * @param {function} next - callback called with
 *   (err) on malformed XML, or
 *   (null, quietSetting, objects, itemErrors) where objects is a list of
 *   { key, [versionId] } entries (versionId decrypted, or literal 'null')
 *   and itemErrors is a list of { key, versionId, error } for entries
 *   whose version id could not be decrypted
 * @return {undefined}
 */
function _parseXml(xmlToParse, next) {
    return parseString(xmlToParse, (err, result) => {
        if (err || !result || !result.Delete || !result.Delete.Object) {
            return next(errors.MalformedXML);
        }
        const json = result.Delete;
        // not quiet is the default if nothing specified
        const quietSetting = json.Quiet && json.Quiet[0] === 'true';
        const objects = [];
        const itemErrors = [];
        for (let i = 0; i < json.Object.length; i++) {
            const item = json.Object[i];
            if (!item.Key) {
                return next(errors.MalformedXML);
            }
            const object = { key: item.Key[0] };
            let itemError = null;
            // TODO check aws behaviour, maybe returning InvalidArgument
            if (item.VersionId) {
                try {
                    // the literal 'null' version id is kept as-is; any
                    // other id is the encrypted form and must decrypt
                    object.versionId = item.VersionId[0] === 'null' ?
                        'null' : VID.decrypt(item.VersionId[0]);
                } catch (exception) {
                    itemError = errors.NoSuchVersion;
                }
            }
            if (itemError) {
                itemErrors.push({ key: item.Key[0],
                    versionId: item.VersionId[0], error: itemError });
            } else {
                objects.push(object);
            }
        }
        return next(null, quietSetting, objects, itemErrors);
    });
}
/**
* gets object metadata and deletes object
* @param {AuthInfo} authInfo - Instance of AuthInfo class with requester's info
* @param {string} canonicalID - canonicalId of requester
* @param {object} request - http request
* @param {string} bucketName - bucketName
* @param {BucketInfo} bucket - bucket
* @param {boolean} quietSetting - true if xml should just include error list
* and false if should include deleted list and error list
* @param {object []} errorResults - list of error result objects with each
@ -118,8 +190,8 @@ function _parseXml(xmlToParse, next) {
* @callback called with (err, quietSetting, errorResults, numOfObjects,
* successfullyDeleted, totalContentLengthDeleted)
*/
export function getObjMetadataAndDelete(bucketName, quietSetting,
errorResults, inPlay, log, next) {
export function getObjMetadataAndDelete(authInfo, canonicalID, request,
bucketName, bucket, quietSetting, errorResults, inPlay, log, next) {
const successfullyDeleted = [];
let totalContentLengthDeleted = 0;
let numOfObjects = 0;
@ -128,48 +200,59 @@ export function getObjMetadataAndDelete(bucketName, quietSetting,
// doing 5 requests at a time. note that the data wrapper
// will do 5 parallel requests to data backend to delete parts
return async.forEachLimit(inPlay, 5, (key, moveOn) => {
metadata.getObjectMD(bucketName, key, {}, log, (err, objMD) => {
return async.forEachLimit(inPlay, 5, (entry, moveOn) => {
const opts = { versionId: entry.versionId };
metadata.getObjectMD(bucketName, entry.key, opts, log, (err, objMD) => {
// if general error from metadata return error
if (err && !err.NoSuchKey) {
log.error('error getting object MD', { error: err, key });
errorResults.push({
key,
error: err,
});
log.error('error getting object MD',
{ error: err, key: entry.key });
errorResults.push({ entry, error: err });
return moveOn();
}
// if particular key does not exist, AWS returns success
// for key so add to successfullyDeleted list and move on
if (err && err.NoSuchKey) {
successfullyDeleted.push(key);
successfullyDeleted.push({ entry });
return moveOn();
}
return services.deleteObject(bucketName, objMD, key, log,
err => {
if (err) {
log.error('error deleting object', { error: err, key });
errorResults.push({
key,
error: err,
});
return moveOn();
let deleted = false;
return async.waterfall([
callback => services.preprocessingVersioningDelete(bucketName,
bucket, entry.key, objMD, entry.versionId, log, callback),
(options, callback) => {
if (options && options.deleteData) {
deleted = true;
return services.deleteObject(bucketName, objMD,
entry.key, options, log, callback);
}
if (objMD['content-length']) {
totalContentLengthDeleted +=
objMD['content-length'];
}
numOfObjects++;
successfullyDeleted.push(key);
request.isDeleteMarker = true; // eslint-disable-line
// TODO need authInfo and canonicalID
return createAndStoreObject(bucketName, bucket, entry.key,
objMD, authInfo, canonicalID, null, request, null,
log, callback);
},
], (err, res) => {
if (err) {
log.error('error deleting object', { error: err, entry });
errorResults.push({ entry, error: err });
return moveOn();
});
}
if (deleted && objMD['content-length']) {
totalContentLengthDeleted += objMD['content-length'];
}
numOfObjects++;
successfullyDeleted.push({ entry, result: res,
isDeletingDeleteMarker: objMD.isDeleteMarker });
return moveOn();
});
});
},
// end of forEach func
err => {
log.trace('finished deleting objects', { numOfObjects });
return next(err, quietSetting, errorResults, numOfObjects,
successfullyDeleted, totalContentLengthDeleted);
successfullyDeleted, totalContentLengthDeleted, bucket);
});
}
@ -205,17 +288,28 @@ function multiObjectDelete(authInfo, request, log, callback) {
return async.waterfall([
function parseXML(next) {
return _parseXml(request.post, (err, quietSetting, objects) => {
if (err || objects.length < 1 || objects.length > 1000) {
return next(errors.MalformedXML);
}
return next(null, quietSetting, objects);
});
return _parseXml(request.post,
(err, quietSetting, objects, itemErrors) => {
const len = objects.length + itemErrors.length;
if (err || len < 1 || len > 1000) {
return next(errors.MalformedXML);
}
return next(null, quietSetting, objects, itemErrors);
});
},
function checkPolicies(quietSetting, objects, next) {
function checkPolicies(quietSetting, objects, itemErrors, next) {
// track the error results for any keys with
// an error response
const errorResults = [];
itemErrors.forEach(error => {
errorResults.push({
entry: {
key: error.key,
versionId: error.versionId,
},
error: error.error,
});
});
// track keys that are still on track to be deleted
const inPlay = [];
// if request from account, no need to check policies
@ -246,16 +340,16 @@ function multiObjectDelete(authInfo, request, log, callback) {
signatureAge: authParams.params.data.signatureAge,
},
parameterize: {
specificResource: objects,
specificResource: objects.map(entry => entry.key),
},
};
return vault.checkPolicies(requestContextParams, authInfo.getArn(),
log, (err, authorizationResults) => {
// there were no policies so received a blanket AccessDenied
if (err && err.AccessDenied) {
objects.forEach(key => {
objects.forEach(entry => {
errorResults.push({
key,
entry,
error: errors.AccessDenied });
});
// send empty array for inPlay
@ -287,12 +381,11 @@ function multiObjectDelete(authInfo, request, log, callback) {
log.error('wrong arn format from vault');
return next(errors.InternalError);
}
const key = result.arn.slice(slashIndex + 1);
if (result.isAllowed) {
inPlay.push(key);
inPlay.push(objects[i]);
} else {
errorResults.push({
key,
entry: objects[i],
error: errors.AccessDenied,
});
}
@ -327,9 +420,9 @@ function multiObjectDelete(authInfo, request, log, callback) {
log.trace("access denied due to bucket acl's");
// if access denied at the bucket level, no access for
// any of the objects so all results will be error results
inPlay.forEach(key => {
inPlay.forEach(entry => {
errorResults.push({
key,
entry,
error: errors.AccessDenied,
});
});
@ -344,14 +437,9 @@ function multiObjectDelete(authInfo, request, log, callback) {
},
function getObjMetadataAndDeleteStep(quietSetting, errorResults, inPlay,
bucket, next) {
return getObjMetadataAndDelete(bucketName, quietSetting,
errorResults, inPlay, log, (err, quietSetting, errorResults,
numOfObjects, successfullyDeleted,
totalContentLengthDeleted) => {
next(err, quietSetting, errorResults,
numOfObjects, successfullyDeleted,
totalContentLengthDeleted, bucket);
});
return getObjMetadataAndDelete(authInfo, canonicalID, request,
bucketName, bucket, quietSetting, errorResults, inPlay,
log, next);
},
], (err, quietSetting, errorResults, numOfObjects,
successfullyDeleted, totalContentLengthDeleted, bucket) => {

View File

@ -1,5 +1,5 @@
import async from 'async';
import { errors } from 'arsenal';
import { errors, versioning } from 'arsenal';
import collectCorsHeaders from '../utilities/collectCorsHeaders';
import data from '../data/wrapper';
@ -11,6 +11,8 @@ import validateHeaders from '../utilities/validateHeaders';
import { pushMetric } from '../utapi/utilities';
import removeAWSChunked from './apiUtils/object/removeAWSChunked';
const VID = versioning.VersionID;
/**
* Preps metadata to be saved (based on copy or replace request header)
* @param {object} sourceObjMD - object md of source object
@ -99,14 +101,15 @@ function _prepMetadata(sourceObjMD, headers, sourceIsDestination, authInfo,
* includes normalized headers
* @param {string} sourceBucket - name of source bucket for object copy
* @param {string} sourceObject - name of source object for object copy
* @param {string} sourceVersionId - versionId of source object for copy
* @param {object} log - the log request
* @param {function} callback - final callback to call with the result
* @return {undefined}
*/
export default
function objectCopy(authInfo, request, sourceBucket,
sourceObject, log, callback) {
log.debug('processing request', { method: 'objectCopy' });
sourceObject, sourceVersionId, log, callback) {
log.info('processing request', { method: 'objectCopy' });
const destBucketName = request.bucketName;
const destObjectKey = request.objectKey;
const sourceIsDestination =
@ -115,6 +118,7 @@ function objectCopy(authInfo, request, sourceBucket,
authInfo,
bucketName: sourceBucket,
objectKey: sourceObject,
versionId: sourceVersionId,
requestType: 'objectGet',
log,
};
@ -169,18 +173,20 @@ function objectCopy(authInfo, request, sourceBucket,
}
if (!sourceObjMD) {
log.debug('no source object', { sourceObject });
return next(errors.NoSuchKey, destBucketMD);
return next(errors.NoSuchKey, null, destBucketMD);
}
const headerValResult =
validateHeaders(sourceObjMD, request.headers);
if (headerValResult.error) {
return next(errors.PreconditionFailed, destBucketMD);
return next(errors.PreconditionFailed, null,
destBucketMD);
}
const storeMetadataParams =
_prepMetadata(sourceObjMD, request.headers,
sourceIsDestination, authInfo, destObjectKey, log);
if (storeMetadataParams.error) {
return next(storeMetadataParams.error, destBucketMD);
return next(storeMetadataParams.error, null,
destBucketMD);
}
let dataLocator;
// If 0 byte object just set dataLocator to empty array
@ -209,7 +215,6 @@ function objectCopy(authInfo, request, sourceBucket,
function goGetData(storeMetadataParams, dataLocator, destBucketMD,
destObjMD, next) {
const serverSideEncryption = destBucketMD.getServerSideEncryption();
// skip if source and dest the same or 0 byte object
// still send along serverSideEncryption info so algo
// and masterKeyId stored properly in metadata
@ -285,34 +290,51 @@ function objectCopy(authInfo, request, sourceBucket,
},
function storeNewMetadata(storeMetadataParams, destDataGetInfoArr,
destObjMD, serverSideEncryption, destBucketMD, next) {
return services.metadataStoreObject(destBucketName,
destDataGetInfoArr,
serverSideEncryption, storeMetadataParams, err => {
if (err) {
log.debug('error storing new metadata', { error: err });
return next(err, destBucketMD);
}
// Clean up any potential orphans in data if object
// put is an overwrite of already existing
// object with same name
// so long as the source is not the same as the destination
let dataToDelete;
if (destObjMD && destObjMD.location &&
!sourceIsDestination) {
dataToDelete = Array.isArray(destObjMD.location) ?
destObjMD.location : [destObjMD.location];
data.batchDelete(dataToDelete,
let versioningOptions = undefined;
async.waterfall([
callback => services.versioningPreprocessing(destBucketName,
destBucketMD, destObjectKey, destObjMD, null, log,
callback),
(options, callback) => {
versioningOptions = options;
// eslint-disable-next-line
storeMetadataParams.versionId = options.versionId;
// eslint-disable-next-line
storeMetadataParams.versioning = options.versioning;
// eslint-disable-next-line
storeMetadataParams.isNull = options.isNull;
// eslint-disable-next-line
storeMetadataParams.nullVersionId = options.nullVersionId;
services.metadataStoreObject(destBucketName,
destDataGetInfoArr, serverSideEncryption,
storeMetadataParams, callback);
},
], (err, res) => {
if (err) {
log.debug('error storing new metadata', { error: err });
return next(err, destBucketMD);
}
// Clean up any potential orphans in data if object
// put is an overwrite of already existing
// object with same name
// so long as the source is not the same as the destination
let dataToDelete = undefined;
if (destObjMD && destObjMD.location &&
!sourceIsDestination && versioningOptions.deleteData) {
dataToDelete = Array.isArray(destObjMD.location) ?
destObjMD.location : [destObjMD.location];
data.batchDelete(dataToDelete,
logger.newRequestLoggerFromSerializedUids(
log.getSerializedUids()));
}
const sourceObjSize = storeMetadataParams.size;
const destObjPrevSize = destObjMD ?
destObjMD['content-length'] : null;
return next(null, destBucketMD, storeMetadataParams,
log.getSerializedUids()));
}
const sourceObjSize = storeMetadataParams.size;
const destObjPrevSize = destObjMD ?
destObjMD['content-length'] : null;
return next(null, res, destBucketMD, storeMetadataParams,
serverSideEncryption, sourceObjSize, destObjPrevSize);
});
});
},
], (err, destBucketMD, storeMetadataParams, serverSideEncryption,
], (err, res, destBucketMD, storeMetadataParams, serverSideEncryption,
sourceObjSize, destObjPrevSize) => {
const corsHeaders = collectCorsHeaders(request.headers.origin,
request.method, destBucketMD);
@ -338,6 +360,13 @@ function objectCopy(authInfo, request, sourceBucket,
serverSideEncryption.masterKeyId;
}
}
if (sourceVersionId) {
additionalHeaders['x-amz-copy-source-version-id'] =
VID.encrypt(sourceVersionId);
}
if (res && res.versionId) {
additionalHeaders['x-amz-version-id'] = VID.encrypt(res.versionId);
}
pushMetric('copyObject', log, {
authInfo,
bucket: destBucketName,

View File

@ -1,10 +1,14 @@
import { errors } from 'arsenal';
import { errors, versioning } from 'arsenal';
import async from 'async';
import collectCorsHeaders from '../utilities/collectCorsHeaders';
import services from '../services';
import validateHeaders from '../utilities/validateHeaders';
import { pushMetric } from '../utapi/utilities';
import { cleanUpBucket } from './apiUtils/bucket/bucketCreation';
import { createAndStoreObject } from './objectPut';
const VID = versioning.VersionID;
/**
* objectDelete - DELETE an object from a bucket
@ -24,48 +28,124 @@ export default function objectDelete(authInfo, request, log, cb) {
}
const bucketName = request.bucketName;
const objectKey = request.objectKey;
let reqVersionId = request.query ? request.query.versionId : undefined;
if (reqVersionId && reqVersionId !== 'null') {
try {
reqVersionId = VID.decrypt(reqVersionId);
} catch (exception) { // eslint-disable-line
return cb(errors.InvalidArgument.customizeDescription(
'Invalid version id specified'), null);
}
}
const valParams = {
authInfo,
bucketName,
objectKey,
versionId: reqVersionId || undefined,
requestType: 'objectDelete',
log,
};
return services.metadataValidateAuthorization(valParams,
(err, bucket, objMD) => {
const corsHeaders = collectCorsHeaders(request.headers.origin,
request.method, bucket);
if (err) {
log.debug('error processing request', {
error: err,
method: 'metadataValidateAuthorization',
});
return cb(err, corsHeaders);
}
const canonicalID = authInfo.getCanonicalID();
let bucketMD = undefined;
let objectMD = undefined;
let corsHeaders = undefined;
let removeDeleteMarker = false;
let deleteOptions = undefined;
return async.waterfall([
callback => services.metadataValidateAuthorization(valParams, callback),
(bucket, objMD, callback) => {
corsHeaders = collectCorsHeaders(request.headers.origin,
request.method, bucket);
bucketMD = bucket;
objectMD = objMD;
const versioning = bucketMD.getVersioningConfiguration();
// TODO adhere to the behaviour of AWS: create a delete marker
// even if the deleting object does not exist
if (!objMD) {
return cb(errors.NoSuchKey, corsHeaders);
}
const headerValResult = validateHeaders(objMD, request.headers);
if (headerValResult.error) {
return cb(headerValResult.error, corsHeaders);
}
if (objMD['content-length']) {
log.end().addDefaultFields({
contentLength: objMD['content-length'],
});
}
return services.deleteObject(bucketName, objMD, objectKey, log,
err => {
if (err) {
return cb(err, corsHeaders);
if (versioning) {
if (reqVersionId) {
corsHeaders['x-amz-version-id'] =
VID.encrypt(reqVersionId);
return cb(null, corsHeaders, null);
}
pushMetric('deleteObject', log, {
authInfo,
bucket: bucketName,
byteLength: objMD['content-length'],
numberOfObjects: 1,
} else {
return callback(errors.NoSuchKey);
}
}
if (objMD) {
const headerValResult = validateHeaders(objMD, request.headers);
if (headerValResult.error) {
return callback(headerValResult.error);
}
if (objMD['content-length']) {
log.end().addDefaultFields({
contentLength: objMD['content-length'],
});
return cb(null, corsHeaders);
});
});
}
}
return callback();
},
callback => services.preprocessingVersioningDelete(bucketName,
bucketMD, objectKey, objectMD, reqVersionId, log, callback),
(options, callback) => {
if (options && options.deleteData) {
// delete object
deleteOptions = options;
return callback(null, options);
}
// putting a new delete marker
if (bucketMD.hasDeletedFlag() &&
canonicalID !== bucketMD.getOwner()) {
log.trace('deleted flag on bucket and request ' +
'from non-owner account');
return callback(errors.NoSuchBucket);
}
if (bucketMD.hasTransientFlag() || bucketMD.hasDeletedFlag()) {
return cleanUpBucket(bucketMD, canonicalID,
log, err => callback(err, null));
}
return callback(null, null);
},
(options, callback) => {
if (options && options.deleteData) {
if (objectMD.isDeleteMarker) {
removeDeleteMarker = true;
}
return services.deleteObject(bucketName, objectMD, objectKey,
options, log, callback);
}
request.isDeleteMarker = true; // eslint-disable-line
return createAndStoreObject(bucketName, bucketMD,
objectKey, objectMD, authInfo, canonicalID, null, request,
null, log, callback);
},
], (err, res) => {
if (err) {
log.debug('error processing request', { error: err,
method: 'metadataValidateAuthorization' });
} else if (deleteOptions === undefined) {
// TODO metric for delete marker
if (res.versionId) {
corsHeaders['x-amz-delete-marker'] = true;
corsHeaders['x-amz-version-id'] = res.versionId === 'null' ?
res.versionId : VID.encrypt(res.versionId);
}
pushMetric('putObject', log, { authInfo, bucket: bucketName,
newByteLength: 0, oldByteLength: 0 });
} else {
if (reqVersionId) {
corsHeaders['x-amz-version-id'] = reqVersionId === 'null' ?
reqVersionId : VID.encrypt(reqVersionId);
if (removeDeleteMarker) {
corsHeaders['x-amz-delete-marker'] = true;
}
}
pushMetric('deleteObject', log, { authInfo, bucket: bucketName,
byteLength: objectMD['content-length'], numberOfObjects: 1 });
}
return cb(err, corsHeaders);
});
}

View File

@ -1,4 +1,5 @@
import { errors } from 'arsenal';
import { errors, versioning } from 'arsenal';
import async from 'async';
import { parseRange } from './apiUtils/object/parseRange';
import collectCorsHeaders from '../utilities/collectCorsHeaders';
@ -7,6 +8,8 @@ import services from '../services';
import validateHeaders from '../utilities/validateHeaders';
import { pushMetric } from '../utapi/utilities';
const VID = versioning.VersionID;
/**
* GET Object - Get an object
* @param {AuthInfo} authInfo - Instance of AuthInfo class with requester's info
@ -20,88 +23,150 @@ function objectGet(authInfo, request, log, callback) {
log.debug('processing request', { method: 'objectGet' });
const bucketName = request.bucketName;
const objectKey = request.objectKey;
let versionId = request.query ? request.query.versionId : undefined;
versionId = versionId || undefined; // to smooth out versionId ''
if (versionId && versionId !== 'null') {
try {
versionId = VID.decrypt(versionId);
} catch (exception) { // eslint-disable-line
return callback(errors.InvalidArgument
.customizeDescription('Invalid version id specified'), null);
}
}
const mdValParams = {
authInfo,
bucketName,
objectKey,
versionId: versionId === 'null' ? undefined : versionId,
requestType: 'objectGet',
log,
};
services.metadataValidateAuthorization(mdValParams, (err, bucket,
objMD) => {
const corsHeaders = collectCorsHeaders(request.headers.origin,
request.method, bucket);
if (err) {
log.debug('error processing request', { error: err });
return callback(err, null, corsHeaders);
}
if (!objMD) {
return callback(errors.NoSuchKey, null, corsHeaders);
}
const headerValResult = validateHeaders(objMD, request.headers);
if (headerValResult.error) {
return callback(headerValResult.error, null, corsHeaders);
}
const responseMetaHeaders = collectResponseHeaders(objMD, corsHeaders);
// 0 bytes file
if (objMD.location === null) {
return async.waterfall([
next => services.metadataValidateAuthorization(mdValParams,
(err, bucket, objMD) => {
const corsHeaders = collectCorsHeaders(request.headers.origin,
request.method, bucket);
if (err) {
log.debug('error processing request', { error: err });
return next(err, null, corsHeaders);
}
if (!objMD) {
return next(errors.NoSuchKey, null, corsHeaders);
}
if (versionId === undefined) {
return next(null, bucket, objMD);
}
if (versionId !== 'null') {
return next(null, bucket, objMD);
}
if (objMD.isNull || (objMD && !objMD.versionId)) {
return next(null, bucket, objMD);
}
if (objMD.nullVersionId === undefined) {
return next(errors.NoSuchVersion, null, corsHeaders);
}
mdValParams.versionId = objMD.nullVersionId;
return services.metadataValidateAuthorization(mdValParams,
(err, bucket, objMD) => {
if (err) {
return next(err, null, corsHeaders);
}
if (!objMD) {
return next(errors.NoSuchKey, null, corsHeaders);
}
return next(null, bucket, objMD);
});
}),
(bucket, objMD, next) => {
const corsHeaders = collectCorsHeaders(request.headers.origin,
request.method, bucket);
const headerValResult = validateHeaders(objMD, request.headers);
if (headerValResult.error) {
return next(headerValResult.error, null, corsHeaders);
}
const responseMetaHeaders = collectResponseHeaders(objMD,
corsHeaders);
// TODO: can probably extract this in a utility function, similar
// logic is used for get ACL
if (bucket.getVersioningConfiguration()) {
if (objMD.isNull || (objMD && !objMD.versionId)) {
responseMetaHeaders['x-amz-version-id'] = 'null';
} else if (objMD.versionId) {
responseMetaHeaders['x-amz-version-id'] =
VID.encrypt(objMD.versionId);
}
}
if (objMD.isDeleteMarker) {
// TODO check if versionId should also be included
responseMetaHeaders['x-amz-delete-marker'] = true;
if (versionId) {
return next(errors.MethodNotAllowed, null,
responseMetaHeaders);
}
return next(errors.NoSuchKey, null, responseMetaHeaders);
}
// 0 bytes file
if (objMD.location === null) {
if (request.headers.range) {
return next(errors.InvalidRange, null, corsHeaders);
}
pushMetric('getObject', log, {
authInfo,
bucket: bucketName,
newByteLength: 0,
});
return next(null, null, responseMetaHeaders);
}
let range;
let maxContentLength;
if (request.headers.range) {
return callback(errors.InvalidRange, null, corsHeaders);
maxContentLength =
parseInt(responseMetaHeaders['Content-Length'], 10);
responseMetaHeaders['Accept-Ranges'] = 'bytes';
const parseRangeRes = parseRange(request.headers.range,
maxContentLength);
range = parseRangeRes.range;
const error = parseRangeRes.error;
if (error) {
return next(error, null, corsHeaders);
}
if (range) {
// End of range should be included so + 1
responseMetaHeaders['Content-Length'] =
Math.min(maxContentLength - range[0],
range[1] - range[0] + 1);
responseMetaHeaders['Content-Range'] = `bytes ${range[0]}-`
+ `${Math.min(maxContentLength - 1, range[1])}` +
`/${maxContentLength}`;
}
}
// To provide for backwards compatibility before md-model-version 2,
// need to handle cases where objMD.location is just a string
const dataLocator = Array.isArray(objMD.location) ?
objMD.location : [{ key: objMD.location }];
// If have a data model before version 2, cannot support get range
// for objects with multiple parts
if (range && dataLocator.length > 1 &&
dataLocator[0].start === undefined) {
return next(errors.NotImplemented, null, corsHeaders);
}
if (objMD['x-amz-server-side-encryption']) {
for (let i = 0; i < dataLocator.length; i++) {
dataLocator[i].masterKeyId =
objMD['x-amz-server-side-encryption-aws-kms-key-id'];
dataLocator[i].algorithm =
objMD['x-amz-server-side-encryption'];
}
}
pushMetric('getObject', log, {
authInfo,
bucket: bucketName,
newByteLength: 0,
newByteLength: responseMetaHeaders['Content-Length'],
});
return callback(null, null, responseMetaHeaders);
}
let range;
let maxContentLength;
if (request.headers.range) {
maxContentLength =
parseInt(responseMetaHeaders['Content-Length'], 10);
responseMetaHeaders['Accept-Ranges'] = 'bytes';
const parseRangeRes = parseRange(request.headers.range,
maxContentLength);
range = parseRangeRes.range;
const error = parseRangeRes.error;
if (error) {
return callback(error, null, corsHeaders);
}
if (range) {
// End of range should be included so + 1
responseMetaHeaders['Content-Length'] =
Math.min(maxContentLength - range[0],
range[1] - range[0] + 1);
responseMetaHeaders['Content-Range'] = `bytes ${range[0]}-`
+ `${Math.min(maxContentLength - 1, range[1])}` +
`/${maxContentLength}`;
}
}
// To provide for backwards compatibility before md-model-version 2,
// need to handle cases where objMD.location is just a string
const dataLocator = Array.isArray(objMD.location) ?
objMD.location : [{ key: objMD.location }];
// If have a data model before version 2, cannot support get range
// for objects with multiple parts
if (range && dataLocator.length > 1 &&
dataLocator[0].start === undefined) {
return callback(errors.NotImplemented, null, corsHeaders);
}
if (objMD['x-amz-server-side-encryption']) {
for (let i = 0; i < dataLocator.length; i++) {
dataLocator[i].masterKeyId =
objMD['x-amz-server-side-encryption-aws-kms-key-id'];
dataLocator[i].algorithm =
objMD['x-amz-server-side-encryption'];
}
}
pushMetric('getObject', log, {
authInfo,
bucket: bucketName,
newByteLength: responseMetaHeaders['Content-Length'],
});
return callback(null, dataLocator, responseMetaHeaders, range);
});
return next(null, dataLocator, responseMetaHeaders, range);
},
], callback);
}

View File

@ -1,4 +1,5 @@
import { errors } from 'arsenal';
import { errors, versioning } from 'arsenal';
import async from 'async';
import aclUtils from '../utilities/aclUtils';
import collectCorsHeaders from '../utilities/collectCorsHeaders';
@ -7,6 +8,8 @@ import { pushMetric } from '../utapi/utilities';
import services from '../services';
import vault from '../auth/vault';
const VID = versioning.VersionID;
// Sample XML response:
/*
<AccessControlPolicy>
@ -41,10 +44,23 @@ export default function objectGetACL(authInfo, request, log, callback) {
log.debug('processing request', { method: 'objectGetACL' });
const bucketName = request.bucketName;
const objectKey = request.objectKey;
let versionId = request.query ? request.query.versionId : undefined;
versionId = versionId || undefined; // to smooth out versionId ''
if (versionId && versionId !== 'null') {
try {
versionId = VID.decrypt(versionId);
} catch (exception) { // eslint-disable-line
return callback(errors.InvalidArgument
.customizeDescription('Invalid version id specified'), null);
}
}
const metadataValParams = {
authInfo,
bucketName,
objectKey,
versionId: versionId === 'null' ? undefined : versionId,
requestType: 'objectGetACL',
log,
};
@ -60,19 +76,59 @@ export default function objectGetACL(authInfo, request, log, callback) {
constants.logId,
];
services.metadataValidateAuthorization(metadataValParams,
return async.waterfall([
callback => services.metadataValidateAuthorization(metadataValParams,
(err, bucket, objectMD) => {
const corsHeaders = collectCorsHeaders(request.headers.origin,
request.method, bucket);
if (err) {
log.trace('request authorization failed',
{ method: 'objectGetACL', error: err });
return callback(err, null, corsHeaders);
return callback(err, bucket);
}
if (!objectMD) {
const err = versionId ? errors.NoSuchVersion :
errors.NoSuchKey;
log.trace('error processing request',
{ method: 'objectGetACL', error: err });
return callback(errors.NoSuchKey, null, corsHeaders);
return callback(err, bucket);
}
if (versionId === undefined) {
return callback(null, bucket, objectMD);
}
if (versionId !== 'null') {
return callback(null, bucket, objectMD);
}
if (objectMD.isNull || (objectMD && !objectMD.versionId)) {
return callback(null, bucket, objectMD);
}
if (!objectMD.nullVersionId) {
return callback(errors.NoSuchVersion, bucket);
}
metadataValParams.versionId = objectMD.nullVersionId;
return services.metadataValidateAuthorization(
metadataValParams, (err, bucket, objectMD) => {
if (err) {
log.trace('request authorization failed',
{ method: 'objectGetACL', error: err });
return callback(err, bucket);
}
if (!objectMD) {
log.trace('error processing request',
{ method: 'objectGetACL', error: err });
return callback(errors.NoSuchVersion, bucket);
}
return callback(null, bucket, objectMD);
});
}),
(bucket, objectMD, callback) => {
// if versioning is enabled or suspended, return version id in
// response headers
let resVersionId;
if (bucket.getVersioningConfiguration()) {
if (objectMD.isNull || (objectMD && !objectMD.versionId)) {
resVersionId = 'null';
} else {
resVersionId = VID.encrypt(objectMD.versionId);
}
}
const objectACL = objectMD.acl;
const allSpecificGrants = [].concat(
@ -109,7 +165,7 @@ export default function objectGetACL(authInfo, request, log, callback) {
authInfo,
bucket: bucketName,
});
return callback(null, xml, corsHeaders);
return callback(null, bucket, xml, resVersionId);
}
/**
* Build array of all canonicalIDs used in ACLs so duplicates
@ -143,7 +199,7 @@ export default function objectGetACL(authInfo, request, log, callback) {
authInfo,
bucket: bucketName,
});
return callback(null, xml, corsHeaders);
return callback(null, bucket, xml, resVersionId);
}
/**
* If acl's set by account canonicalID,
@ -154,7 +210,7 @@ export default function objectGetACL(authInfo, request, log, callback) {
if (err) {
log.trace('error processing request',
{ method: 'objectGetACL', error: err });
return callback(err, null, corsHeaders);
return callback(err, bucket);
}
const individualGrants = canonicalIDs.map(canonicalID => {
/**
@ -180,11 +236,20 @@ export default function objectGetACL(authInfo, request, log, callback) {
.concat(individualGrants).concat(uriGrantInfo);
// parse info about accounts and owner info to convert to xml
const xml = aclUtils.convertToXml(grantInfo);
pushMetric('getObjectAcl', log, {
authInfo,
bucket: bucketName,
});
return callback(null, xml, corsHeaders);
return callback(null, bucket, xml, resVersionId);
});
},
], (err, bucket, xml, resVersionId) => {
const resHeaders = collectCorsHeaders(request.headers.origin,
request.method, bucket);
if (err) {
return callback(err, null, resHeaders);
}
pushMetric('getObjectAcl', log, {
authInfo,
bucket: bucketName,
});
resHeaders['x-amz-version-id'] = resVersionId;
return callback(null, xml, resHeaders);
});
}

View File

@ -1,4 +1,5 @@
import { errors } from 'arsenal';
import { errors, versioning } from 'arsenal';
import async from 'async';
import collectCorsHeaders from '../utilities/collectCorsHeaders';
import collectResponseHeaders from '../utilities/collectResponseHeaders';
@ -6,6 +7,8 @@ import services from '../services';
import validateHeaders from '../utilities/validateHeaders';
import { pushMetric } from '../utapi/utilities';
const VID = versioning.VersionID;
/**
* HEAD Object - Same as Get Object but only respond with headers
*(no actual body)
@ -20,15 +23,29 @@ export default function objectHead(authInfo, request, log, callback) {
log.debug('processing request', { method: 'objectHead' });
const bucketName = request.bucketName;
const objectKey = request.objectKey;
const metadataValParams = {
let versionId = request.query ? request.query.versionId : undefined;
versionId = versionId || undefined; // to smooth out versionId ''
if (versionId && versionId !== 'null') {
try {
versionId = VID.decrypt(versionId);
} catch (exception) { // eslint-disable-line
return callback(errors.InvalidArgument
.customizeDescription('Invalid version id specified'), null);
}
}
const mdValParams = {
authInfo,
bucketName,
objectKey,
versionId: versionId === 'null' ? undefined : versionId,
requestType: 'objectHead',
log,
};
return services.metadataValidateAuthorization(metadataValParams,
return async.waterfall([
next => services.metadataValidateAuthorization(mdValParams,
(err, bucket, objMD) => {
const corsHeaders = collectCorsHeaders(request.headers.origin,
request.method, bucket);
@ -37,21 +54,48 @@ export default function objectHead(authInfo, request, log, callback) {
error: err,
method: 'metadataValidateAuthorization',
});
return callback(err, corsHeaders);
return next(err, corsHeaders);
}
if (!objMD) {
return callback(errors.NoSuchKey, corsHeaders);
return next(errors.NoSuchKey, corsHeaders);
}
if (versionId === undefined) {
return next(null, bucket, objMD);
}
if (versionId !== 'null') {
return next(null, bucket, objMD);
}
if (objMD.isNull) {
return next(null, bucket, objMD);
}
if (objMD.nullVersionId === undefined) {
return next(errors.NoSuchKey, corsHeaders);
}
mdValParams.versionId = objMD.nullVersionId;
return services.metadataValidateAuthorization(mdValParams,
(err, bucket, objMD) => {
if (err) {
return next(err, corsHeaders);
}
if (!objMD) {
return next(errors.NoSuchKey, corsHeaders);
}
return next(null, bucket, objMD);
});
}),
(bucket, objMD, next) => {
const corsHeaders = collectCorsHeaders(request.headers.origin,
request.method, bucket);
const headerValResult = validateHeaders(objMD, request.headers);
if (headerValResult.error) {
return callback(headerValResult.error, corsHeaders);
return next(headerValResult.error, corsHeaders);
}
const responseMetaHeaders = collectResponseHeaders(objMD,
corsHeaders);
pushMetric('headObject', log, {
authInfo,
bucket: bucketName,
});
return callback(null, responseMetaHeaders);
});
const responseHeaders = collectResponseHeaders(objMD, corsHeaders);
if (versionId) {
responseHeaders['x-amz-version-id'] = VID.encrypt(versionId);
}
pushMetric('headObject', log, { authInfo, bucket: bucketName });
return next(null, responseHeaders);
},
], callback);
}

View File

@ -1,4 +1,5 @@
import { errors } from 'arsenal';
import { errors, versioning } from 'arsenal';
import async from 'async';
import data from '../data/wrapper';
import services from '../services';
@ -12,23 +13,27 @@ import { logger } from '../utilities/logger';
import { pushMetric } from '../utapi/utilities';
import kms from '../kms/wrapper';
import removeAWSChunked from './apiUtils/object/removeAWSChunked';
import metadata from '../metadata/wrapper';
const VID = versioning.VersionID;
function _storeInMDandDeleteData(bucketName, dataGetInfo, cipherBundle,
metadataStoreParams, dataToDelete, deleteLog, callback) {
services.metadataStoreObject(bucketName, dataGetInfo,
cipherBundle, metadataStoreParams, (err, contentMD5) => {
cipherBundle, metadataStoreParams, (err, res) => {
if (err) {
return callback(err);
}
if (dataToDelete) {
data.batchDelete(dataToDelete, deleteLog);
}
return callback(null, contentMD5);
return callback(null, res);
});
}
function _storeIt(bucketName, objectKey, objMD, authInfo, canonicalID,
cipherBundle, request, streamingV4Params, log, callback) {
function createAndStoreObject(bucketName, bucketMD, objectKey, objMD, authInfo,
canonicalID, cipherBundle, request, streamingV4Params, log, callback) {
const size = request.parsedContentLength;
const websiteRedirectHeader =
@ -55,98 +60,85 @@ function _storeIt(bucketName, objectKey, objMD, authInfo, canonicalID,
headers['x-amz-acl'] = request.query['x-amz-acl'];
}
const metadataStoreParams = {
objectKey,
authInfo,
metaHeaders,
size,
objectKey, authInfo, metaHeaders, size,
contentType: request.headers['content-type'],
cacheControl: request.headers['cache-control'],
contentDisposition: request.headers['content-disposition'],
contentEncoding:
removeAWSChunked(request.headers['content-encoding']),
expires: request.headers.expires,
headers,
log,
contentEncoding: removeAWSChunked(request.headers['content-encoding']),
expires: request.headers.expires, headers, log,
isDeleteMarker: request.isDeleteMarker,
};
let dataToDelete;
let dataGetInfoArr = undefined;
let dataToDelete = undefined;
if (objMD && objMD.location) {
dataToDelete = Array.isArray(objMD.location) ?
objMD.location : [objMD.location];
}
const reqVersionId = request.query ? request.query.versionId : undefined;
// null - new object
// 0 or > 0 - existing object with content-length 0 or greater than 0
const prevContentLen = objMD && objMD['content-length'] !== undefined ?
objMD['content-length'] : null;
if (size !== 0) {
log.trace('storing object in data', {
method: 'services.metadataValidateAuthorization',
});
return dataStore(objectKeyContext, cipherBundle, request, size,
streamingV4Params, log, (err, dataGetInfo, calculatedHash) => {
if (err) {
log.trace('error from data', {
error: err,
method: 'dataStore',
});
return callback(err);
}
// So that data retrieval information for MPU's and
// regular puts are stored in the same data structure,
// place the retrieval info here into a single element array
const dataGetInfoArr = [{
key: dataGetInfo.key,
size,
start: 0,
dataStoreName: dataGetInfo.dataStoreName,
}];
if (cipherBundle) {
dataGetInfoArr[0].cryptoScheme = cipherBundle.cryptoScheme;
dataGetInfoArr[0].cipheredDataKey =
cipherBundle.cipheredDataKey;
}
metadataStoreParams.contentMD5 = calculatedHash;
return _storeInMDandDeleteData(
bucketName, dataGetInfoArr, cipherBundle,
metadataStoreParams, dataToDelete,
logger.newRequestLoggerFromSerializedUids(
log.getSerializedUids()), (err, contentMD5) => {
if (err) {
return callback(err);
}
pushMetric('putObject', log, {
authInfo,
bucket: bucketName,
newByteLength: size,
oldByteLength: prevContentLen,
});
return callback(null, contentMD5);
});
});
}
log.trace('content-length is 0 so only storing metadata', {
method: 'services.metadataValidateAuthorization',
});
metadataStoreParams.contentMD5 = constants.emptyFileMd5;
const dataGetInfo = null;
return _storeInMDandDeleteData(bucketName, dataGetInfo, cipherBundle,
metadataStoreParams, dataToDelete,
logger.newRequestLoggerFromSerializedUids(log
.getSerializedUids()), (err, contentMD5) => {
if (err) {
return callback(err);
}
pushMetric('putObject', log, {
authInfo,
bucket: bucketName,
newByteLength: size,
oldByteLength: prevContentLen,
const requestLogger =
logger.newRequestLoggerFromSerializedUids(log.getSerializedUids());
return async.waterfall([
callback => {
if (size === 0) {
metadataStoreParams.contentMD5 = constants.emptyFileMd5;
return callback(null, null, null);
}
return dataStore(objectKeyContext, cipherBundle, request, size,
streamingV4Params, log, callback);
},
(dataGetInfo, calculatedHash, callback) => {
if (dataGetInfo === null || dataGetInfo === undefined) {
return callback(null, null);
}
// So that data retrieval information for MPU's and
// regular puts are stored in the same data structure,
// place the retrieval info here into a single element array
const { key, dataStoreName } = dataGetInfo;
const dataGetInfoArr = [{ key, size, start: 0, dataStoreName }];
if (cipherBundle) {
dataGetInfoArr[0].cryptoScheme = cipherBundle.cryptoScheme;
dataGetInfoArr[0].cipheredDataKey =
cipherBundle.cipheredDataKey;
}
metadataStoreParams.contentMD5 = calculatedHash;
return callback(null, dataGetInfoArr);
},
(infoArr, callback) => {
dataGetInfoArr = infoArr;
return services.versioningPreprocessing(bucketName, bucketMD,
metadataStoreParams.objectKey, objMD, reqVersionId, log,
callback);
},
(options, callback) => {
if (!options.deleteNullVersionData) {
return callback(null, options);
}
const params = { versionId: options.nullVersionId };
return metadata.getObjectMD(bucketName, objectKey,
params, log, (err, nullObjMD) => {
if (nullObjMD.location) {
dataToDelete = Array.isArray(nullObjMD.location) ?
nullObjMD.location : [nullObjMD.location];
}
return callback(null, options);
});
return callback(null, contentMD5);
});
},
(options, callback) => {
metadataStoreParams.versionId = options.versionId;
metadataStoreParams.versioning = options.versioning;
metadataStoreParams.isNull = options.isNull;
metadataStoreParams.nullVersionId = options.nullVersionId;
return _storeInMDandDeleteData(bucketName, dataGetInfoArr,
cipherBundle, metadataStoreParams,
options.deleteData ? dataToDelete : undefined,
requestLogger, callback);
},
], callback);
}
/**
* PUT Object in the requested bucket. Steps include:
* validating metadata for authorization, bucket and object existence etc.
@ -166,7 +158,6 @@ function _storeIt(bucketName, objectKey, objMD, authInfo, canonicalID,
* @param {Function} callback - final callback to call with the result
* @return {undefined}
*/
export default
function objectPut(authInfo, request, streamingV4Params, log, callback) {
log.debug('processing request', { method: 'objectPut' });
if (!aclUtils.checkGrantHeaderValidity(request.headers)) {
@ -175,18 +166,13 @@ function objectPut(authInfo, request, streamingV4Params, log, callback) {
}
const bucketName = request.bucketName;
const objectKey = request.objectKey;
const valParams = {
authInfo,
bucketName,
objectKey,
requestType: 'objectPut',
log,
};
const requestType = 'objectPut';
const valParams = { authInfo, bucketName, objectKey, requestType, log };
const canonicalID = authInfo.getCanonicalID();
log.trace('owner canonicalID to send to data', { canonicalID });
return services.metadataValidateAuthorization(valParams, (err, bucket,
objMD) => {
return services.metadataValidateAuthorization(valParams,
(err, bucket, objMD) => {
const corsHeaders = collectCorsHeaders(request.headers.origin,
request.method, bucket);
if (err) {
@ -196,74 +182,50 @@ function objectPut(authInfo, request, streamingV4Params, log, callback) {
});
return callback(err, null, corsHeaders);
}
if (bucket.hasDeletedFlag() &&
canonicalID !== bucket.getOwner()) {
if (bucket.hasDeletedFlag() && canonicalID !== bucket.getOwner()) {
log.trace('deleted flag on bucket and request ' +
'from non-owner account');
return callback(errors.NoSuchBucket);
}
const serverSideEncryption = bucket.getServerSideEncryption();
if (bucket.hasTransientFlag() ||
bucket.hasDeletedFlag()) {
log.trace('transient or deleted flag so cleaning up bucket');
return cleanUpBucket(bucket,
canonicalID, log, err => {
if (err) {
log.debug('error cleaning up bucket with flag',
{ error: err,
transientFlag:
bucket.hasTransientFlag(),
deletedFlag:
bucket.hasDeletedFlag(),
});
// To avoid confusing user with error
// from cleaning up
// bucket return InternalError
return callback(errors.InternalError, null,
corsHeaders);
}
if (serverSideEncryption) {
return kms.createCipherBundle(
serverSideEncryption,
log, (err, cipherBundle) => {
if (err) {
return callback(errors.InternalError,
null, corsHeaders);
}
return _storeIt(bucketName, objectKey,
objMD, authInfo, canonicalID,
cipherBundle, request,
streamingV4Params, log,
(err, contentMD5) =>
callback(err, contentMD5,
corsHeaders));
});
}
return _storeIt(bucketName, objectKey, objMD,
authInfo, canonicalID, null, request,
streamingV4Params, log,
(err, contentMD5) =>
callback(err, contentMD5, corsHeaders));
});
}
if (serverSideEncryption) {
return kms.createCipherBundle(
serverSideEncryption,
log, (err, cipherBundle) => {
if (err) {
return callback(errors.InternalError, null,
corsHeaders);
}
return _storeIt(bucketName, objectKey, objMD,
authInfo, canonicalID, cipherBundle,
request, streamingV4Params, log,
(err, contentMD5) =>
callback(err, contentMD5, corsHeaders));
});
}
return _storeIt(bucketName, objectKey, objMD, authInfo, canonicalID,
null, request, streamingV4Params, log,
(err, contentMD5) =>
callback(err, contentMD5, corsHeaders));
return async.waterfall([
callback => {
if (bucket.hasTransientFlag() || bucket.hasDeletedFlag()) {
return cleanUpBucket(bucket, canonicalID, log, callback);
}
return callback();
},
callback => {
const serverSideEncryption = bucket.getServerSideEncryption();
if (serverSideEncryption) {
return kms.createCipherBundle(
serverSideEncryption, log, callback);
}
return callback(null, null);
},
(cipherBundle, callback) => createAndStoreObject(bucketName,
bucket, objectKey, objMD, authInfo, canonicalID, cipherBundle,
request, streamingV4Params, log, callback),
], (err, res) => {
if (err) {
return callback(err, null, corsHeaders);
}
const newByteLength = request.parsedContentLength;
const oldByteLength = objMD ? objMD['content-length'] : null;
pushMetric('putObject', log, { authInfo, bucket: bucketName,
newByteLength, oldByteLength });
if (res) {
corsHeaders.ETag = `"${res.contentMD5}"`;
}
const vcfg = bucket.getVersioningConfiguration();
if (vcfg && vcfg.Status === 'Enabled') {
if (res && res.versionId) {
corsHeaders['x-amz-version-id'] =
VID.encrypt(res.versionId);
}
}
return callback(null, res, corsHeaders);
});
});
}
module.exports = { createAndStoreObject, objectPut };

View File

@ -1,4 +1,4 @@
import { errors } from 'arsenal';
import { errors, versioning } from 'arsenal';
import async from 'async';
import acl from '../metadata/acl';
@ -9,6 +9,8 @@ import constants from '../../constants';
import services from '../services';
import vault from '../auth/vault';
const VID = versioning.VersionID;
/*
Format of xml request:
@ -68,6 +70,17 @@ export default function objectPutACL(authInfo, request, log, cb) {
requestType: 'objectPutACL',
log,
};
let reqVersionId = request.query ? request.query.versionId : undefined;
if (reqVersionId && reqVersionId !== 'null') {
try {
reqVersionId = VID.decrypt(reqVersionId);
} catch (exception) { // eslint-disable-line
return cb(errors.InvalidArgument
.customizeDescription('Invalid version id specified'), null);
}
metadataValParams.versionId = reqVersionId;
}
const possibleGrants = ['FULL_CONTROL', 'WRITE_ACP', 'READ', 'READ_ACP'];
const addACLParams = {
Canned: '',
@ -88,11 +101,42 @@ export default function objectPutACL(authInfo, request, log, cb) {
request.headers['x-amz-grant-full-control'], 'FULL_CONTROL');
return async.waterfall([
next => services.metadataValidateAuthorization(metadataValParams, next),
(bucket, objectMD, next) => {
if (!objectMD) {
return next(errors.NoSuchKey, bucket);
next => services.metadataValidateAuthorization(metadataValParams,
(err, bucket, objectMD) => {
if (err) {
return next(err);
}
if (!objectMD) {
const err = reqVersionId ? errors.NoSuchVersion :
errors.NoSuchKey;
return next(err, bucket);
}
if (!reqVersionId) {
return next(null, bucket, objectMD);
}
if (reqVersionId !== 'null') {
return next(null, bucket, objectMD);
}
if (objectMD.isNull || (objectMD && !objectMD.versionId)) {
return next(null, bucket, objectMD);
}
if (objectMD.nullVersionId === undefined) {
return next(errors.NoSuchVersion, bucket);
}
metadataValParams.versionId = objectMD.nullVersionId;
return services.metadataValidateAuthorization(metadataValParams,
(err, bucket, objectMD) => {
if (err) {
return next(err);
}
if (!objectMD) {
return next(errors.NoSuchVersion, bucket);
}
return next(null, bucket, objectMD);
});
}),
(bucket, objectMD, next) => {
metadataValParams.versionId = objectMD.versionId;
// If not setting acl through headers, parse body
let jsonGrants;
let aclOwnerID;
@ -241,23 +285,36 @@ export default function objectPutACL(authInfo, request, log, cb) {
},
function waterfall4(bucket, objectMD, ACLParams, next) {
// Add acl's to object metadata
acl.addObjectACL(bucket, objectKey, objectMD, ACLParams, log, next);
const params = metadataValParams.versionId ?
{ versionId: metadataValParams.versionId } : {};
acl.addObjectACL(bucket, objectKey, objectMD,
ACLParams, params, log, err => next(err, bucket, objectMD));
},
], (err, bucket) => {
const corsHeaders = collectCorsHeaders(request.headers.origin,
], (err, bucket, objectMD) => {
const resHeaders = collectCorsHeaders(request.headers.origin,
request.method, bucket);
if (err) {
log.trace('error processing request', {
error: err,
method: 'objectPutACL',
});
return cb(err, corsHeaders);
return cb(err, resHeaders);
}
// if versioning is enabled or suspended, return version id in
// response headers
if (bucket.getVersioningConfiguration()) {
if (objectMD.isNull || (objectMD && !objectMD.versionId)) {
resHeaders['x-amz-version-id'] = 'null';
} else {
resHeaders['x-amz-version-id'] =
VID.encrypt(objectMD.versionId);
}
}
log.trace('processed request successfully in object put acl api');
pushMetric('putObjectAcl', log, {
authInfo,
bucket: bucketName,
});
return cb(null, corsHeaders);
return cb(null, resHeaders);
});
}

View File

@ -1,5 +1,5 @@
import async from 'async';
import { errors } from 'arsenal';
import { errors, versioning } from 'arsenal';
import collectCorsHeaders from '../utilities/collectCorsHeaders';
import constants from '../../constants';
@ -12,6 +12,8 @@ import services from '../services';
import setUpCopyLocator from './apiUtils/object/setUpCopyLocator';
import validateHeaders from '../utilities/validateHeaders';
const VID = versioning.VersionID;
/**
* PUT Part Copy during a multipart upload.
@ -21,13 +23,14 @@ import validateHeaders from '../utilities/validateHeaders';
* includes normalized headers
* @param {string} sourceBucket - name of source bucket for object copy
* @param {string} sourceObject - name of source object for object copy
* @param {string} sourceVersionId - versionId of the source object for copy
* @param {object} log - the request logger
* @param {function} callback - final callback to call with the result
* @return {undefined}
*/
export default
function objectPutCopyPart(authInfo, request, sourceBucket,
sourceObject, log, callback) {
sourceObject, sourceVersionId, log, callback) {
log.debug('processing request', { method: 'objectPutCopyPart' });
const destBucketName = request.bucketName;
const destObjectKey = request.objectKey;
@ -35,6 +38,7 @@ function objectPutCopyPart(authInfo, request, sourceBucket,
authInfo,
bucketName: sourceBucket,
objectKey: sourceObject,
versionId: sourceVersionId,
requestType: 'objectGet',
log,
};
@ -322,6 +326,10 @@ function objectPutCopyPart(authInfo, request, sourceBucket,
serverSideEncryption.masterKeyId;
}
}
if (sourceVersionId) {
additionalHeaders['x-amz-copy-source-version-id'] =
VID.encrypt(sourceVersionId);
}
// TODO push metric for objectPutCopyPart
// pushMetric('putObjectCopyPart', log, {
// bucket: destBucketName,

88
lib/metadata/VersionID.js Normal file
View File

@ -0,0 +1,88 @@
// VersionID format:
//         timestamp  sequential_position  site_id  other_information
// where:
// - timestamp              14 bytes epoch in ms (good until 5138)
// - sequential_position    06 bytes position in the ms slot (1B ops)
// - site_id                05 bytes site identifier (like PARIS)
// - other_information      arbitrary user input, such as a unique string
// lengths of the fixed-width components, in bytes
const LENGTH_TS = 14; // timestamp: epoch in ms
const LENGTH_SQ = 6; // position in the ms slot
const LENGTH_ST = 5; // site identifier
// padding templates for each fixed-width component
const PAD_TS = '0'.repeat(LENGTH_TS);
const PAD_SQ = '0'.repeat(LENGTH_SQ);
const PAD_ST = ' '.repeat(LENGTH_ST);
// site identifier, like PARIS, TOKYO; space-padded or trimmed to LENGTH_ST
// NOTE(review): when SITE_ID is unset in the environment, the interpolation
// yields the literal string 'undefined' (trimmed to 'undef') -- confirm
// that this is the intended fallback
const SITE_ID = `${process.env.SITE_ID}${PAD_ST}`.slice(0, LENGTH_ST);
// constants for max epoch and max sequential number in the same epoch
const MAX_TS = Math.pow(10, LENGTH_TS) - 1; // good until 16 Nov 5138
const MAX_SQ = Math.pow(10, LENGTH_SQ) - 1; // good for 1 billion ops
// left-pad a timestamp value to exactly LENGTH_TS characters
function padTimestamp(value) {
    return `${PAD_TS}${value}`.slice(-LENGTH_TS);
}
// left-pad a sequence value to exactly LENGTH_SQ characters
function padSequence(value) {
    return `${PAD_SQ}${value}`.slice(-LENGTH_SQ);
}
// the earliest versionId (all components at their maximum, i.e. epoch 0
// after the reversal below), used for versions created before versioning
const VID_INF = padTimestamp(MAX_TS) + padSequence(MAX_SQ) + SITE_ID;
// module state: epoch and sequence position of the last generated versionId
let lastTimestamp = 0;
let lastSequence = 0;

/**
 * This function ACTIVELY (wastes CPU cycles and) waits for an amount of time
 * before returning to the caller. This should not be used frequently.
 *
 * @param {Number} span - time to wait in nanoseconds (1/1000000 millisecond)
 * @return {Undefined} - nothing
 */
function wait(span) {
    const start = process.hrtime();
    for (;;) {
        const [seconds, nanos] = process.hrtime(start);
        if (seconds * 1e9 + nanos >= span) {
            return;
        }
    }
}

/**
 * This function returns a "versionId" string indicating the current time as a
 * combination of the current time in millisecond, the position of the request
 * in that millisecond, and the identifier of the local site (which could be
 * datacenter, region, or server depending on the notion of geographics). This
 * function is stateful which means it keeps some values in the memory and the
 * next call depends on the previous call.
 *
 * @param {string} info - the additional info to ensure uniqueness if desired
 * @return {string} - the formated versionId string
 */
function generateVersionId(info) {
    // Need to wait for the millisecond slot got "flushed". We wait for
    // only a single millisecond when the module is restarted, which is
    // necessary for the correctness of the system. This is therefore cheap.
    if (lastTimestamp === 0) {
        wait(1000000);
    }
    // get the present epoch (in millisecond)
    const now = Date.now();
    // A bit more rationale: why do we use a sequence number instead of using
    // process.hrtime which gives us time in nanoseconds? The idea is that at
    // any time resolution, some concurrent requests may have the same time due
    // to the way the OS is queueing requests or getting clock cycles. Our
    // approach however will give the time based on the position of a request
    // in the queue for the same millisecond which is supposed to be unique.
    lastSequence = now === lastTimestamp ? lastSequence + 1 : 0;
    lastTimestamp = now;
    // In the default cases, we reverse the chronological order of the
    // timestamps so that all versions of an object can be retrieved in the
    // reversed chronological order---newest versions first. This is because of
    // the limitation of leveldb for listing keys in the reverse order.
    return padTimestamp(MAX_TS - lastTimestamp) +
        padSequence(MAX_SQ - lastSequence) + SITE_ID + info;
}
module.exports = { generateVersionId, VID_INF };

View File

@ -12,7 +12,7 @@ const acl = {
metadata.updateBucket(bucket.getName(), bucket, log, cb);
},
addObjectACL(bucket, objectKey, objectMD, addACLParams, log, cb, params) {
addObjectACL(bucket, objectKey, objectMD, addACLParams, params, log, cb) {
log.trace('updating object acl in metadata');
// eslint-disable-next-line no-param-reassign
objectMD.acl = addACLParams;

View File

@ -411,36 +411,6 @@ class BucketFileInterface {
});
}
/**
* This function checks if params have a property name
* If there is add it to the finalParams
* Else do nothing
* @param {String} name - The parameter name
* @param {Object} params - The params to search
* @param {Object} extParams - The params sent to the extension
* @return {undefined}
*/
addExtensionParam(name, params, extParams) {
if (params.hasOwnProperty(name)) {
// eslint-disable-next-line no-param-reassign
extParams[name] = params[name];
}
}
/**
* Used for advancing the last character of a string for setting upper/lower
* bounds
* For e.g., _setCharAt('demo1') results in 'demo2',
* _setCharAt('scality') results in 'scalitz'
* @param {String} str - string to be advanced
* @return {String} - modified string
*/
_setCharAt(str) {
let chr = str.charCodeAt(str.length - 1);
chr = String.fromCharCode(chr + 1);
return str.substr(0, str.length - 1) + chr;
}
/**
* This complex function deals with different extensions of bucket listing:
* Delimiter based search or MPU based search.
@ -451,39 +421,9 @@ class BucketFileInterface {
* @return {undefined}
*/
internalListObject(bucketName, params, log, cb) {
const requestParams = {};
let Ext;
const extParams = {};
// multipart upload listing
if (params.listingType === 'multipartuploads') {
Ext = arsenal.algorithms.list.MPU;
this.addExtensionParam('queryPrefixLength', params, extParams);
this.addExtensionParam('splitter', params, extParams);
if (params.keyMarker) {
requestParams.gt = `overview${params.splitter}` +
`${params.keyMarker}${params.splitter}`;
if (params.uploadIdMarker) {
requestParams.gt += `${params.uploadIdMarker}`;
}
// advance so that lower bound does not include the supplied
// markers
requestParams.gt = this._setCharAt(requestParams.gt);
}
} else {
Ext = arsenal.algorithms.list.Delimiter;
if (params.marker) {
requestParams.gt = params.marker;
this.addExtensionParam('gt', requestParams, extParams);
}
}
this.addExtensionParam('delimiter', params, extParams);
this.addExtensionParam('maxKeys', params, extParams);
if (params.prefix) {
requestParams.start = params.prefix;
requestParams.lt = this._setCharAt(params.prefix);
this.addExtensionParam('start', requestParams, extParams);
}
const extension = new Ext(extParams, log);
const extName = params.listingType || 'Basic';
const extension = new arsenal.algorithms.list[extName](params, log);
const requestParams = extension.genMDParams();
this.loadDBIfExists(bucketName, log, (err, db) => {
if (err) {
return cb(err);

View File

@ -1,9 +1,25 @@
import { errors, algorithms } from 'arsenal';
import { errors, algorithms, versioning } from 'arsenal';
import getMultipartUploadListing from './getMultipartUploadListing';
import { metadata } from './metadata';
const genVID = versioning.VersionID.generateVersionId;
const defaultMaxKeys = 1000;
let uidCounter = 0;
// Generate a fresh version id for the in-memory backend, feeding a
// monotonically increasing counter into genVID as the uniqueness suffix so
// that ids created within the same millisecond still differ.
function generateVersionId() {
    return genVID(uidCounter++);
}
// Compose the internal metadata key for a specific object version: the
// object key and the version id joined by a NUL byte, so that version keys
// sort directly after their master key in the key map.
function formatVersionKey(key, versionId) {
    const VERSION_SEPARATOR = '\0';
    return key + VERSION_SEPARATOR + versionId;
}
// Advance the last character of a string by one code unit, e.g.
// inc('demo1') === 'demo2' and inc('scality') === 'scalitz'; used to build
// an exclusive upper bound for key-range scans. Falsy inputs (empty string,
// null, undefined) are returned unchanged.
function inc(str) {
    if (!str) {
        return str;
    }
    const lastIndex = str.length - 1;
    const bumped = String.fromCharCode(str.charCodeAt(lastIndex) + 1);
    return str.slice(0, lastIndex) + bumped;
}
const metastore = {
createBucket: (bucketName, bucketMD, log, cb) => {
@ -64,12 +80,36 @@ const metastore = {
process.nextTick(() => {
metastore.getBucketAttributes(bucketName, log, err => {
// TODO: implement versioning for in-memory backend
const data = undefined;
if (err) {
return cb(err);
}
if (params && params.versioning) {
const versionId = generateVersionId();
objVal.versionId = versionId; // eslint-disable-line
metadata.keyMaps.get(bucketName).set(objName, objVal);
// eslint-disable-next-line
objName = formatVersionKey(objName, versionId);
metadata.keyMaps.get(bucketName).set(objName, objVal);
return cb(null, `{"versionId":"${versionId}"}`);
}
if (params && params.versionId === '') {
const versionId = generateVersionId();
objVal.versionId = versionId; // eslint-disable-line
metadata.keyMaps.get(bucketName).set(objName, objVal);
return cb(null, `{"versionId":"${objVal.versionId}"}`);
} else if (params && params.versionId) {
objVal.versionId = params.versionId; // eslint-disable-line
const mst = metadata.keyMaps.get(bucketName).get(objName);
if (mst && mst.versionId === params.versionId) {
metadata.keyMaps.get(bucketName).set(objName, objVal);
}
// eslint-disable-next-line
objName = formatVersionKey(objName, params.versionId);
metadata.keyMaps.get(bucketName).set(objName, objVal);
return cb(null, `{"versionId":"${objVal.versionId}"}`);
}
metadata.keyMaps.get(bucketName).set(objName, objVal);
return cb(err, data);
return cb(null);
});
});
},
@ -80,6 +120,10 @@ const metastore = {
if (err) {
return cb(err, { bucket });
}
if (params && params.versionId) {
// eslint-disable-next-line
objName = formatVersionKey(objName, params.versionId);
}
if (!metadata.keyMaps.has(bucketName)
|| !metadata.keyMaps.get(bucketName).has(objName)) {
return cb(null, { bucket: bucket.serialize() });
@ -100,6 +144,10 @@ const metastore = {
if (err) {
return cb(err);
}
if (params && params.versionId) {
// eslint-disable-next-line
objName = formatVersionKey(objName, params.versionId);
}
if (!metadata.keyMaps.has(bucketName)
|| !metadata.keyMaps.get(bucketName).has(objName)) {
return cb(errors.NoSuchKey);
@ -118,6 +166,29 @@ const metastore = {
if (!metadata.keyMaps.get(bucketName).has(objName)) {
return cb(errors.NoSuchKey);
}
if (params && params.versionId) {
const baseKey = inc(formatVersionKey(objName, ''));
const vobjName = formatVersionKey(objName,
params.versionId);
metadata.keyMaps.get(bucketName).delete(vobjName);
const mst = metadata.keyMaps.get(bucketName).get(objName);
if (mst.versionId === params.versionId) {
const keys = [];
metadata.keyMaps.get(bucketName).forEach((val, key) => {
if (key < baseKey && key > vobjName) {
keys.push(key);
}
});
if (keys.length === 0) {
metadata.keyMaps.get(bucketName).delete(objName);
return cb();
}
const key = keys.sort()[0];
const value = metadata.keyMaps.get(bucketName).get(key);
metadata.keyMaps.get(bucketName).set(objName, value);
}
return cb();
}
metadata.keyMaps.get(bucketName).delete(objName);
return cb();
});
@ -161,22 +232,30 @@ const metastore = {
if (!metadata.keyMaps.has(bucketName)) {
return cb(errors.NoSuchBucket);
}
const keys = [];
metadata.keyMaps.get(bucketName).forEach((val, key) => {
if (marker === undefined || key > marker) {
keys.push(key);
}
});
keys.sort();
// If marker specified, edit the keys array so it
// only contains keys that occur alphabetically after the marker
const filterParameters = {
delimiter,
start: prefix,
maxKeys: numKeys,
gt: marker,
};
const Ext = new algorithms.list.Delimiter(filterParameters, log);
const listingType = params.listingType || 'Delimiter';
const extension = new algorithms.list[listingType](params, log);
const listingParams = extension.genMDParams();
const keys = [];
metadata.keyMaps.get(bucketName).forEach((val, key) => {
if (listingParams.gt && listingParams.gt >= key) {
return null;
}
if (listingParams.gte && listingParams.gte > key) {
return null;
}
if (listingParams.lt && key >= listingParams.lt) {
return null;
}
if (listingParams.lte && key > listingParams.lte) {
return null;
}
return keys.push(key);
});
keys.sort();
// Iterate through keys array and filter keys containing
// delimiter into response.CommonPrefixes and filter remaining
@ -198,11 +277,11 @@ const metastore = {
// not filtered.
// Also, Ext.filter returns false when hit max keys.
                // This ends the listing loop early once the limit is reached.
if (!Ext.filter(obj)) {
if (extension.filter(obj) < 0) {
break;
}
}
return cb(null, Ext.result());
return cb(null, extension.result());
});
},

View File

@ -125,17 +125,47 @@ const metadata = {
},
listObject: (bucketName, listingParams, log, cb) => {
client
.listObject(bucketName, listingParams,
log, (err, data) => {
log.debug('getting object listing from metadata');
if (err) {
log.debug('error from metadata', { implName, err });
return cb(err);
}
log.debug('object listing retrieved from metadata');
return cb(err, data);
});
if (listingParams.listingType === undefined) {
// eslint-disable-next-line
listingParams.listingType = 'Delimiter';
}
client.listObject(bucketName, listingParams, log, (err, data) => {
log.debug('getting object listing from metadata');
if (err) {
log.debug('error from metadata', { implName, err });
return cb(err);
}
log.debug('object listing retrieved from metadata');
if (listingParams.listingType === 'DelimiterVersions') {
return cb(err, data);
}
// eslint-disable-next-line
data.Contents = data.Contents.map(entry => {
const tmp = JSON.parse(entry.value);
return {
key: entry.key,
value: {
Size: tmp['content-length'],
ETag: tmp['content-md5'],
VersionId: tmp.versionId,
IsNull: tmp.isNull,
IsDeleteMarker: tmp.isDeleteMarker,
LastModified: tmp['last-modified'],
Owner: {
DisplayName: tmp['owner-display-name'],
ID: tmp['owner-id'],
},
StorageClass: tmp['x-amz-storage-class'],
Initiated: tmp.initiated,
Initiator: tmp.initiator,
EventualStorageBucket: tmp.eventualStorageBucket,
partLocations: tmp.partLocations,
creationDate: tmp.creationDate,
},
};
});
return cb(err, data);
});
},
listMultipartUploads: (bucketName, listingParams, log, cb) => {

View File

@ -93,7 +93,7 @@ export default function routePUT(request, response, log, statsClient) {
if (mfaDelete) {
log.debug('mfa deletion is not implemented');
return routesUtils.responseNoBody(
errors.NotImplemented.customizedDescription(
errors.NotImplemented.customizeDescription(
'MFA Deletion is not supported yet.'), null,
response, null, log);
}
@ -256,16 +256,14 @@ export default function routePUT(request, response, log, statsClient) {
});
api.callApiMethod('objectPut', request, log,
(err, contentMD5, corsHeaders) => {
(err, res, corsHeaders) => { // eslint-disable-line
if (err) {
return routesUtils.responseNoBody(err, corsHeaders,
response, 200, log);
}
// ETag's hex should always be enclosed in quotes
statsReport500(err, statsClient);
const resMetaHeaders = corsHeaders || {};
resMetaHeaders.ETag = `"${contentMD5}"`;
return routesUtils.responseNoBody(err, resMetaHeaders,
return routesUtils.responseNoBody(err, corsHeaders,
response, 200, log);
});
}

View File

@ -101,11 +101,19 @@ function errorXMLResponse(errCode, response, log, corsHeaders) {
log.addDefaultFields({
bytesSent,
});
if (corsHeaders) {
// eslint-disable-next-line no-param-reassign
corsHeaders['Content-Type'] = 'application/xml';
// eslint-disable-next-line no-param-reassign
corsHeaders['Content-Length'] = xmlStr.length;
}
setCommonResponseHeaders(corsHeaders, response, log);
response.writeHead(errCode.code, { 'Content-type': 'application/xml' });
return response.end(xmlStr, 'utf8', () => {
log.end().info('responded with error XML', {
httpCode: response.statusCode,
xmlStr,
corsHeaders,
});
});
}

View File

@ -1,7 +1,7 @@
import assert from 'assert';
import async from 'async';
import { errors } from 'arsenal';
import { errors, versioning } from 'arsenal';
import BucketInfo from './metadata/BucketInfo';
import bucketShield from './api/apiUtils/bucket/bucketShield';
@ -14,10 +14,11 @@ import metadata from './metadata/wrapper';
import { logger } from './utilities/logger';
import removeAWSChunked from './api/apiUtils/object/removeAWSChunked';
const VID_INF = versioning.VersionID.VID_INF;
const usersBucket = constants.usersBucket;
const oldUsersBucket = constants.oldUsersBucket;
export default {
getService(authInfo, request, log, splitter, cb, overrideUserbucket) {
const canonicalID = authInfo.getCanonicalID();
@ -99,8 +100,12 @@ export default {
return cb(null, bucket, null);
});
}
return metadata.getBucketAndObjectMD(bucketName, objectKey, {}, log,
(err, data) => {
let versionId = params.versionId;
if (versionId === 'null') {
versionId = undefined;
}
return metadata.getBucketAndObjectMD(bucketName, objectKey,
{ versionId }, log, (err, data) => {
if (err) {
log.debug('metadata get failed', { error: err });
return cb(err);
@ -130,6 +135,18 @@ export default {
log.trace('Bucket found', { bucketName });
return cb(null, bucket, null);
}
if (params.versionId === 'null') {
if (obj.nullVersionId && !obj.isNull) {
log.debug('null version exists, get the null version');
params.versionId = obj.nullVersionId; // eslint-disable-line
return this.metadataValidateAuthorization(params, cb);
}
if (obj.versionId && !obj.isNull) {
log.debug('null version does not exists');
return cb(null, bucket, null);
}
// otherwise the master version is the null version
}
// TODO: Add bucket policy and IAM checks
if (!isObjAuthorized(bucket, obj, requestType, canonicalID)) {
log.debug('access denied for user on object', { requestType });
@ -171,7 +188,7 @@ export default {
const { objectKey, authInfo, size, contentMD5, metaHeaders,
contentType, cacheControl, contentDisposition, contentEncoding,
expires, multipart, headers, overrideMetadata, log,
lastModifiedDate } = params;
lastModifiedDate, versioning, versionId } = params;
log.trace('storing object in metadata');
assert.strictEqual(typeof bucketName, 'string');
const omVal = {};
@ -196,7 +213,7 @@ export default {
omVal['last-modified'] = lastModifiedDate || new Date().toJSON();
omVal['content-md5'] = contentMD5;
omVal['x-amz-server-version-id'] = '';
// omVal['x-amz-server-version-id'] = '';
// TODO: Handle this as a utility function for all object puts
// similar to normalizing request but after checkAuth so
// string to sign is not impacted. This is GH Issue#89.
@ -215,7 +232,7 @@ export default {
}
// simple/no version. will expand once object versioning is introduced
omVal['x-amz-version-id'] = 'null';
// omVal['x-amz-version-id'] = 'null';
omVal.acl = {
Canned: 'private',
FULL_CONTROL: [],
@ -224,6 +241,20 @@ export default {
READ_ACP: [],
};
const options = {};
if (versioning) {
options.versioning = versioning;
}
if (versionId || versionId === '') {
options.versionId = versionId;
}
omVal.isNull = params.isNull;
omVal.nullVersionId = params.nullVersionId;
omVal.isDeleteMarker = params.isDeleteMarker;
if (versionId && versionId !== 'null') {
omVal.versionId = versionId;
}
// Store user provided metadata. TODO: limit size.
// For multipart upload this also serves to transfer
// over metadata originally sent with the initiation
@ -245,41 +276,50 @@ export default {
log.trace('object metadata', { omVal });
// If this is not the completion of a multipart upload
// parse the headers to get the ACL's if any
if (!multipart) {
const parseAclParams = {
headers,
resourceType: 'object',
acl: omVal.acl,
log,
};
log.trace('parsing acl from headers');
acl.parseAclFromHeaders(parseAclParams, (err, parsedACL) => {
if (err) {
log.debug('error parsing acl', { error: err });
return cb(err);
async.waterfall([
callback => {
if (multipart) {
return callback();
}
omVal.acl = parsedACL;
metadata.putObjectMD(bucketName, objectKey, omVal, {}, log,
err => {
const parseAclParams = {
headers,
resourceType: 'object',
acl: omVal.acl,
log,
};
log.trace('parsing acl from headers');
acl.parseAclFromHeaders(parseAclParams, (err, parsedACL) => {
if (err) {
log.error('error from metadata', { error: err });
return cb(err);
log.warn('error parsing acl', { error: err });
return callback(err);
}
log.trace('object successfully stored in metadata');
return cb(err, contentMD5);
omVal.acl = parsedACL;
return callback();
});
return undefined;
});
} else {
metadata.putObjectMD(bucketName, objectKey, omVal, {}, log, err => {
if (err) {
log.error('error from metadata', { error: err });
return cb(err);
return null;
},
callback => metadata.putObjectMD(bucketName, objectKey, omVal,
options, log, callback),
], (err, data) => {
if (err) {
log.error('error from metadata', { error: err });
return cb(err);
}
log.trace('object successfully stored in metadata');
// for versioning only, other features need to process their data
let versionId = undefined;
if (data) {
if (params.isNull && params.isDeleteMarker) {
versionId = 'null';
// TODO: check if for version specific PUT request we want to
// return 'null' for versionId
} else if (!params.isNull) {
versionId = JSON.parse(data).versionId;
}
log.trace('object successfully stored in metadata');
return cb(err, contentMD5);
});
}
}
return cb(err, { contentMD5, versionId });
});
},
/**
@ -287,37 +327,31 @@ export default {
* @param {string} bucketName - bucket in which objectMD is stored
* @param {object} objectMD - object's metadata
* @param {string} objectKey - object key name
* @param {object} options - other instructions, such as { versionId } to
* delete a specific version of the object
* @param {Log} log - logger instance
* @param {function} cb - callback from async.waterfall in objectGet
* @return {undefined}
*/
deleteObject(bucketName, objectMD, objectKey, log, cb) {
deleteObject(bucketName, objectMD, objectKey, options, log, cb) {
log.trace('deleting object from bucket');
assert.strictEqual(typeof bucketName, 'string');
assert.strictEqual(typeof objectMD, 'object');
if (objectMD['x-amz-version-id'] === 'null') {
log.trace('object identified as non-versioned');
// non-versioned buckets
log.trace('deleteObject: deleting non-versioned object');
return metadata.deleteObjectMD(bucketName, objectKey, {}, log,
err => {
if (err) {
return cb(err);
}
cb();
log.trace('deleteObject: metadata delete OK');
const deleteLog = logger.newRequestLogger();
if (objectMD.location === null) {
return undefined;
} else if (!Array.isArray(objectMD.location)) {
return data.delete(objectMD.location, deleteLog);
}
return data.batchDelete(objectMD.location, deleteLog);
});
}
// versioning
log.debug('deleteObject: versioning not fully implemented');
return metadata.deleteObjectMD(bucketName, objectKey, {}, log, cb);
return metadata.deleteObjectMD(bucketName, objectKey, options, log,
(err, res) => {
if (err) {
return cb(err, res);
}
                cb(null, res); // respond to the caller now; the data
                // cleanup below continues on a best-effort basis
log.trace('deleteObject: metadata delete OK');
const deleteLog = logger.newRequestLogger();
if (objectMD.location === null) {
return undefined;
} else if (!Array.isArray(objectMD.location)) {
return data.delete(objectMD.location, deleteLog);
}
return data.batchDelete(objectMD.location, deleteLog);
});
},
/**
@ -735,4 +769,131 @@ export default {
metadata.deleteObjectMD(mpuBucketName, key, {}, log, callback);
}, err => cb(err));
},
    /**
     * Determine how an incoming (possibly version-specific) object write must
     * be stored in a bucket, based on the bucket's versioning configuration
     * and the state of the current master version; may also perform a
     * preparatory metadata write (archiving or deleting the null version).
     *
     * @param {string} bucketName - name of the bucket
     * @param {object} bucketMD - bucket metadata, provides the versioning
     * configuration
     * @param {string} objectKey - key of the object being written
     * @param {object} objMD - metadata of the current master version, or
     * falsy if the object does not exist yet
     * @param {string} reqVersionId - version id targeted by the request, if
     * any; the literal string 'null' targets the null version
     * @param {Log} log - logger instance
     * @param {function} callback - called with (err, options); options may
     * carry versionId, versioning, isNull, nullVersionId, deleteData and
     * deleteNullVersionData for the subsequent metadata write
     * @return {undefined}
     */
    versioningPreprocessing(bucketName, bucketMD, objectKey, objMD,
        reqVersionId, log, callback) {
        const options = {};
        // bucket is not versioning enabled: plain overwrite of the master
        // key; deleteData presumably tells the caller to remove the
        // overwritten version's stored data -- TODO confirm with the PUT path
        if (!bucketMD.getVersioningConfiguration()) {
            options.deleteData = true;
            return callback(null, options);
        }
        // bucket is versioning enabled
        // master state: version id of the current master version (if any)
        // and whether that master is the 'null' version
        const mstVersionId = objMD ? objMD.versionId : undefined;
        const mstIsNull = objMD ? objMD.isNull : false;
        const vstat = bucketMD.getVersioningConfiguration().Status;
        if (!reqVersionId) {
            // non-version-specific versioning operation
            if (mstVersionId === undefined || mstIsNull) {
                // object does not exist or is not versioned (before versioning)
                if (vstat === 'Suspended') {
                    // versioning is suspended, overwrite the existing version
                    options.versionId = '';
                    options.isNull = true;
                    options.deleteData = true;
                    return callback(null, options);
                }
                // versioning is enabled, create a new version
                options.versioning = true;
                if (objMD) {
                    // store master version in a new key so it survives as
                    // the null version; a pre-versioning master has no
                    // version id of its own, so it is archived under VID_INF
                    const versionId = mstIsNull ? mstVersionId : VID_INF;
                    objMD.versionId = versionId; // eslint-disable-line
                    objMD.isNull = true; // eslint-disable-line
                    options.nullVersionId = versionId;
                    return metadata.putObjectMD(bucketName, objectKey, objMD,
                        { versionId }, log, err => callback(err, options));
                }
                return callback(null, options);
            }
            // master is versioned and is not a null version
            const nullVersionId = objMD.nullVersionId;
            if (vstat === 'Suspended') {
                // versioning is suspended, overwrite the existing version
                // and drop the metadata of the archived null version, if any
                options.versionId = '';
                options.isNull = true;
                options.deleteNullVersionData = true;
                if (nullVersionId === undefined) {
                    return callback(null, options);
                }
                return metadata.deleteObjectMD(bucketName, objectKey,
                    { versionId: nullVersionId }, log,
                    err => callback(err, options));
            }
            // versioning is enabled, put the new version and carry forward
            // the reference to the archived null version
            options.versioning = true;
            options.nullVersionId = nullVersionId;
            return callback(null, options);
        } else if (!mstVersionId) {
            // version-specific versioning operation, master is not versioned
            if (vstat === 'Suspended' || reqVersionId === 'null') {
                // object does not exist or is not versioned (before versioning)
                options.versionId = '';
                options.isNull = true;
                options.deleteData = true;
                return callback(null, options);
            }
            // targeting a specific version of an unversioned object
            // TODO check AWS behaviour
            return callback(errors.BadRequest);
        } else if (mstIsNull) {
            // master is versioned and is a null version
            if (reqVersionId === 'null') {
                // overwrite the existing version, make new version null
                options.versionId = '';
                options.isNull = true;
                options.deleteData = true;
                return callback(null, options);
            }
            // overwrite the requested version in place
            // TODO check AWS behaviour
            options.versionId = reqVersionId;
            options.deleteData = true;
            return callback(null, options);
        }
        // master is versioned and is not a null version: overwrite the
        // requested version in place
        options.versionId = reqVersionId;
        options.deleteData = true;
        return callback(null, options);
    },
preprocessingVersioningDelete(bucketName, bucketMD, objectName, objectMD,
reqVersionId, log, callback) {
const options = {};
// bucket is not versioning enabled
if (!bucketMD.getVersioningConfiguration()) {
options.deleteData = true;
return callback(null, options);
}
// bucket is versioning enabled
if (reqVersionId && reqVersionId !== 'null') {
// deleting a specific version
options.deleteData = true;
options.versionId = reqVersionId;
return callback(null, options);
}
if (reqVersionId) {
// deleting the 'null' version if it exists
if (objectMD.versionId === undefined) {
// object is not versioned, deleting it
options.deleteData = true;
return callback(null, options);
}
if (objectMD.isNull) {
// master is the null version
options.deleteData = true;
options.versionId = objectMD.versionId;
return callback(null, options);
}
if (objectMD.nullVersionId) {
// null version exists, deleting it
options.deleteData = true;
options.versionId = objectMD.nullVersionId;
return callback(null, options);
}
// null version does not exist, no deletion
// TODO check AWS behaviour for no deletion (seems having no error)
return callback(errors.NoSuchKey);
}
// not deleting any specific version, making a delete marker instead
return callback(null, options);
},
};

View File

@ -315,7 +315,7 @@ utils.mapHeaders = function mapHeaders(headers, addHeaders) {
*/
utils.isUnsupportedQuery = function isUnsupportedQuery(queryObj) {
return Object.keys(queryObj)
.some(key => (constants.unsupportedQueries.indexOf(key) > -1));
.some(key => constants.unsupportedQueries[key]);
};
export default utils;

View File

@ -19,7 +19,7 @@
},
"homepage": "https://github.com/scality/S3#readme",
"dependencies": {
"arsenal": "scality/Arsenal",
"arsenal": "scality/Arsenal#ft/vsp",
"async": "~1.4.2",
"babel-core": "^6.5.2",
"babel-plugin-transform-es2015-destructuring": "^6.5.2",

View File

@ -0,0 +1,42 @@
import async from 'async';
import { S3 } from 'aws-sdk';
import getConfig from '../../test/support/config';
// Shared S3 client for the versioning test utilities (v4 signatures).
const config = getConfig('default', { signatureVersion: 'v4' });
const s3 = new S3(config);
// Canonical VersioningConfiguration payloads for putBucketVersioning.
export const constants = {
    versioningEnabled: { Status: 'Enabled' },
    versioningSuspended: { Status: 'Suspended' },
};
// Delete every (Key, VersionId) pair in `versionList` from `bucket`,
// issuing the deletes in parallel and reporting the first error (if
// any) to `callback`.
function _deleteVersionList(versionList, bucket, callback) {
    async.each(versionList, (versionInfo, cb) => s3.deleteObject({
        Bucket: bucket,
        Key: versionInfo.Key,
        VersionId: versionInfo.VersionId,
    }, cb), callback);
}
/**
 * Delete every version and delete marker matched by a listObjectVersions
 * call, paging through truncated listings recursively.
 *
 * @param {object} params - listObjectVersions parameters; must include
 *   Bucket, may include Prefix and other listing filters
 * @param {function} callback - called with (err) once all matched
 *   versions and delete markers have been deleted
 * @return {undefined}
 */
export function removeAllVersions(params, callback) {
    const bucket = params.Bucket;
    async.waterfall([
        cb => s3.listObjectVersions(params, cb),
        (data, cb) => _deleteVersionList(data.DeleteMarkers, bucket,
            err => cb(err, data)),
        (data, cb) => _deleteVersionList(data.Versions, bucket,
            err => cb(err, data)),
        (data, cb) => {
            if (data.IsTruncated) {
                // Carry over the caller's original listing filters
                // (e.g. Prefix); the previous code rebuilt params from
                // scratch and silently dropped them on later pages.
                const nextParams = Object.assign({}, params, {
                    KeyMarker: data.NextKeyMarker,
                    VersionIdMarker: data.NextVersionIdMarker,
                });
                return removeAllVersions(nextParams, cb);
            }
            return cb();
        },
    ], callback);
}

View File

@ -8,7 +8,11 @@ import getConfig from '../support/config';
const bucket = `bigmpu-test-bucket-${Date.now()}`;
const key = 'mpuKey';
const body = 'abc';
const eTag = '900150983cd24fb0d6963f7d28e17f72';
const partCount = 10000;
const eTag = require('crypto').createHash('md5').update(body).digest('hex');
const finalETag = require('crypto').createHash('md5')
.update(Buffer.from(eTag.repeat(partCount), 'hex').toString('binary'),
'binary').digest('hex');
function uploadPart(n, uploadId, s3, next) {
const params = {
@ -63,7 +67,7 @@ describe('large mpu', function tester() {
// will fail on AWS because parts too small
itSkipIfAWS('should intiate, put parts and complete mpu ' +
'with 10,000 parts', done => {
`with ${partCount} parts`, done => {
process.stdout.write('***Running large MPU test***\n');
let uploadId;
return waterfall([
@ -78,14 +82,14 @@ describe('large mpu', function tester() {
}),
next => {
process.stdout.write('putting parts');
return timesLimit(10000, 20, (n, cb) =>
return timesLimit(partCount, 20, (n, cb) =>
uploadPart(n, uploadId, s3, cb), err =>
next(err)
);
},
next => {
const parts = [];
for (let i = 1; i <= 10000; i++) {
for (let i = 1; i <= partCount; i++) {
parts.push({
ETag: eTag,
PartNumber: i,
@ -114,8 +118,8 @@ describe('large mpu', function tester() {
if (err) {
return next(err);
}
assert.strictEqual(data.ETag, '"e0c3d6b4446bf8f97' +
'9c50df6d79e9e0a-10000"');
assert.strictEqual(data.ETag,
`"${finalETag}-${partCount}"`);
return next();
});
},

View File

@ -1,7 +1,10 @@
import assert from 'assert';
import withV4 from '../support/withV4';
import BucketUtility from '../../lib/utility/bucket-util';
import {
constants,
removeAllVersions,
} from '../../lib/utility/versioning-util.js';
const date = Date.now();
const bucket = `completempu${date}`;
@ -14,51 +17,14 @@ function checkNoError(err) {
`Expected success, got error ${JSON.stringify(err)}`);
}
const testing = process.env.VERSIONING === 'no' ? describe.skip : describe;
describe('Complete MPU', () => {
withV4(sigCfg => {
let bucketUtil;
let s3;
let uploadId;
let firstEtag;
const bucketUtil = new BucketUtility('default', sigCfg);
const s3 = bucketUtil.s3;
beforeEach(() => {
bucketUtil = new BucketUtility('default', sigCfg);
s3 = bucketUtil.s3;
return s3.createBucketAsync({ Bucket: bucket })
.then(() => s3.createMultipartUploadAsync({
Bucket: bucket, Key: key }))
.then(res => {
uploadId = res.UploadId;
return s3.uploadPartAsync({ Bucket: bucket, Key: key,
PartNumber: 1, UploadId: uploadId, Body: bodyFirstPart });
})
.then(res => {
firstEtag = res.ETag;
return firstEtag;
})
.then(() => s3.uploadPartAsync({ Bucket: bucket, Key: key,
PartNumber: 2, UploadId: uploadId, Body: bodySecondPart }))
.catch(err => {
process.stdout.write(`Error in beforeEach: ${err}\n`);
throw err;
});
});
afterEach(() => {
process.stdout.write('Emptying bucket');
return bucketUtil.empty(bucket)
.then(() => {
process.stdout.write('Deleting bucket');
return bucketUtil.deleteOne(bucket);
})
.catch(err => {
process.stdout.write('Error in afterEach');
throw err;
});
});
it('should complete an MPU with fewer parts than were ' +
'originally put', done => {
function _completeMpuAndCheckVid(uploadId, firstEtag, expectedVid, cb) {
s3.completeMultipartUpload({
Bucket: bucket,
Key: key,
@ -70,6 +36,12 @@ describe('Complete MPU', () => {
checkNoError(err);
// to show that the mpu completed with just 1 part
assert.strictEqual(data.ETag.slice(-3), '-1"');
const versionId = data.VersionId;
if (expectedVid) {
assert.notEqual(versionId, undefined);
} else {
assert.strictEqual(versionId, expectedVid);
}
return s3.getObject({
Bucket: bucket,
Key: key,
@ -78,9 +50,115 @@ describe('Complete MPU', () => {
checkNoError(err);
// to show that data in completed key is just first part
assert.strictEqual(data.ContentLength, '10');
done();
if (versionId) {
assert.strictEqual(data.VersionId, versionId);
}
cb();
});
});
}
afterEach(done => {
removeAllVersions({ Bucket: bucket }, err => {
if (err) {
return done(err);
}
return s3.deleteBucket({ Bucket: bucket }, done);
});
});
describe('on bucket without versioning configuration', () => {
let uploadId;
let firstEtag;
beforeEach(() => s3.createBucketAsync({ Bucket: bucket })
.then(() => s3.createMultipartUploadAsync({
Bucket: bucket, Key: key }))
.then(res => {
uploadId = res.UploadId;
return s3.uploadPartAsync({ Bucket: bucket, Key: key,
PartNumber: 1, UploadId: uploadId, Body: bodyFirstPart });
})
.then(res => {
firstEtag = res.ETag;
return firstEtag;
})
.then(() => s3.uploadPartAsync({ Bucket: bucket, Key: key,
PartNumber: 2, UploadId: uploadId, Body: bodySecondPart }))
.catch(err => {
process.stdout.write(`Error in beforeEach: ${err}\n`);
throw err;
})
);
it('should complete an MPU with fewer parts than were ' +
'originally put without returning a version id', done => {
_completeMpuAndCheckVid(uploadId, firstEtag, undefined, done);
});
});
testing('on bucket with enabled versioning', () => {
let uploadId;
let firstEtag;
beforeEach(() => s3.createBucketAsync({ Bucket: bucket })
.then(() => s3.putBucketVersioningAsync({ Bucket: bucket,
VersioningConfiguration: constants.versioningEnabled }))
.then(() => s3.createMultipartUploadAsync({
Bucket: bucket, Key: key }))
.then(res => {
uploadId = res.UploadId;
return s3.uploadPartAsync({ Bucket: bucket, Key: key,
PartNumber: 1, UploadId: uploadId, Body: bodyFirstPart });
})
.then(res => {
firstEtag = res.ETag;
return firstEtag;
})
.then(() => s3.uploadPartAsync({ Bucket: bucket, Key: key,
PartNumber: 2, UploadId: uploadId, Body: bodySecondPart }))
.catch(err => {
process.stdout.write(`Error in beforeEach: ${err}\n`);
throw err;
})
);
it('should complete an MPU with fewer parts than were ' +
'originally put and return a version id', done => {
_completeMpuAndCheckVid(uploadId, firstEtag, true, done);
});
});
testing('on bucket with suspended versioning', () => {
let uploadId;
let firstEtag;
beforeEach(() => s3.createBucketAsync({ Bucket: bucket })
.then(() => s3.putBucketVersioningAsync({ Bucket: bucket,
VersioningConfiguration: constants.versioningSuspended }))
.then(() => s3.createMultipartUploadAsync({
Bucket: bucket, Key: key }))
.then(res => {
uploadId = res.UploadId;
return s3.uploadPartAsync({ Bucket: bucket, Key: key,
PartNumber: 1, UploadId: uploadId, Body: bodyFirstPart });
})
.then(res => {
firstEtag = res.ETag;
return firstEtag;
})
.then(() => s3.uploadPartAsync({ Bucket: bucket, Key: key,
PartNumber: 2, UploadId: uploadId, Body: bodySecondPart }))
.catch(err => {
process.stdout.write(`Error in beforeEach: ${err}\n`);
throw err;
})
);
it('should complete an MPU with fewer parts than were ' +
'originally put and should not return a version id', done => {
_completeMpuAndCheckVid(uploadId, firstEtag, undefined, done);
});
});
});
});

View File

@ -6,6 +6,7 @@ import getConfig from '../support/config';
import methodRequest from '../../lib/utility/cors-util';
import { generateCorsParams } from '../../lib/utility/cors-util';
import { WebsiteConfigTester } from '../../lib/utility/website-util';
import { removeAllVersions } from '../../lib/utility/versioning-util';
const config = getConfig('default', { signatureVersion: 'v4' });
const s3 = new S3(config);
@ -450,7 +451,7 @@ describe('Cross Origin Resource Sharing requests', () => {
beforeEach(done => s3.putBucketCors(corsParams, done));
afterEach(done => {
s3.deleteObject({ Bucket: bucket, Key: objectKey }, err => {
removeAllVersions({ Bucket: bucket }, err => {
if (err && err.code !== 'NoSuchKey' &&
err.code !== 'NoSuchBucket') {
process.stdout.write(`Unexpected err in afterEach: ${err}`);

View File

@ -3,7 +3,7 @@ import assert from 'assert';
import withV4 from '../support/withV4';
import BucketUtility from '../../lib/utility/bucket-util';
const bucket = 'object-test-mpu';
const bucket = `object-test-mpu-${Date.now()}`;
const objectKey = 'toAbort&<>"\'';
// Get the expected object of listMPU API.

View File

@ -0,0 +1,399 @@
import assert from 'assert';
import { S3 } from 'aws-sdk';
import async from 'async';
import getConfig from '../support/config';
const bucket = `versioning-bucket-${Date.now()}`;
const testing = process.env.VERSIONING === 'no' ?
describe.skip : describe;
testing('listObject - Delimiter version', function testSuite() {
this.timeout(600000);
let s3 = undefined;
// Delete each listed version (or delete marker) from `bucket` in
// parallel; first error wins.
// NOTE(review): duplicates _deleteVersionList in
// lib/utility/versioning-util.js — consider importing it instead.
function _deleteVersionList(versionList, bucket, callback) {
    async.each(versionList, (versionInfo, cb) => {
        const versionId = versionInfo.VersionId;
        const params = { Bucket: bucket, Key: versionInfo.Key,
            VersionId: versionId };
        s3.deleteObject(params, cb);
    }, callback);
}
// Best-effort cleanup: delete every version and delete marker in
// `bucket`; a missing bucket counts as already clean. Only removes the
// first listing page (no truncation handling) — sufficient for this
// suite's small key count.
function _removeAllVersions(bucket, callback) {
    return s3.listObjectVersions({ Bucket: bucket }, (err, data) => {
        // aws-sdk surfaces the error type on `err.code`; the previous
        // check of the `err.NoSuchBucket` property could never match,
        // so a deleted bucket made cleanup fail instead of no-op.
        if (err && err.code === 'NoSuchBucket') {
            return callback();
        } else if (err) {
            return callback(err);
        }
        return _deleteVersionList(data.DeleteMarkers, bucket, err => {
            if (err) {
                return callback(err);
            }
            return _deleteVersionList(data.Versions, bucket, callback);
        });
    });
}
// setup test
before(done => {
const config = getConfig('default', { signatureVersion: 'v4' });
s3 = new S3(config);
s3.createBucket({ Bucket: bucket }, done);
});
// delete bucket after testing
after(done => {
_removeAllVersions(bucket, err => {
if (err) {
return done(err);
}
return s3.deleteBucket({ Bucket: bucket }, err => {
assert.strictEqual(err, null,
`Error deleting bucket: ${err}`);
return done();
});
});
});
let versioning = false;
const objects = [
{ name: 'notes/summer/august/1.txt', value: 'foo', isNull: true },
{ name: 'notes/year.txt', value: 'foo', isNull: true },
{ name: 'notes/yore.rs', value: 'foo', isNull: true },
{ name: 'notes/zaphod/Beeblebrox.txt', value: 'foo', isNull: true },
{ name: 'Pâtisserie=中文-español-English', value: 'foo' },
{ name: 'Pâtisserie=中文-español-English', value: 'bar' },
{ name: 'notes/spring/1.txt', value: 'qux' },
{ name: 'notes/spring/1.txt', value: 'foo' },
{ name: 'notes/spring/1.txt', value: 'bar' },
{ name: 'notes/spring/2.txt', value: 'foo' },
{ name: 'notes/spring/2.txt', value: null },
{ name: 'notes/spring/march/1.txt', value: 'foo' },
{ name: 'notes/spring/march/1.txt', value: 'bar', isNull: true },
{ name: 'notes/summer/1.txt', value: 'foo' },
{ name: 'notes/summer/1.txt', value: 'bar' },
{ name: 'notes/summer/2.txt', value: 'bar' },
{ name: 'notes/summer/4.txt', value: null },
{ name: 'notes/summer/4.txt', value: null },
{ name: 'notes/summer/4.txt', value: null },
{ name: 'notes/summer/444.txt', value: null },
{ name: 'notes/summer/44444.txt', value: null },
];
it('put objects inside bucket', done => {
async.eachSeries(objects, (obj, next) => {
async.waterfall([
next => {
if (!versioning && obj.isNull !== true) {
const params = {
Bucket: bucket,
VersioningConfiguration: {
Status: 'Enabled',
},
};
versioning = true;
return s3.putBucketVersioning(params, err => next(err));
} else if (versioning && obj.isNull === true) {
const params = {
Bucket: bucket,
VersioningConfiguration: {
Status: 'Suspended',
},
};
versioning = false;
return s3.putBucketVersioning(params, err => next(err));
}
return next();
},
next => {
if (obj.value === null) {
return s3.deleteObject({
Bucket: bucket,
Key: obj.name,
}, function test(err) {
const headers = this.httpResponse.headers;
assert.strictEqual(headers['x-amz-delete-marker'],
'true');
// eslint-disable-next-line no-param-reassign
obj.versionId = headers['x-amz-version-id'];
return next(err);
});
}
return s3.putObject({
Bucket: bucket,
Key: obj.name,
Body: obj.value,
}, (err, res) => {
if (err) {
return next(err);
}
// eslint-disable-next-line no-param-reassign
obj.versionId = res.VersionId || 'null';
return next();
});
},
], err => next(err));
}, err => done(err));
});
[
{
name: 'basic listing',
params: {},
expectedResult: objects,
commonPrefix: [],
isTruncated: false,
nextKeyMarker: undefined,
nextVersionIdMarker: undefined,
},
{
name: 'with valid key marker',
params: { KeyMarker: 'notes/spring/1.txt' },
expectedResult: [
objects[0],
objects[1],
objects[2],
objects[3],
objects[9],
objects[10],
objects[11],
objects[12],
objects[13],
objects[14],
objects[15],
objects[16],
objects[17],
objects[18],
objects[19],
objects[20],
],
commonPrefix: [],
isTruncated: false,
nextKeyMarker: undefined,
nextVersionIdMarker: undefined,
},
{
name: 'with bad key marker',
params: { KeyMarker: 'zzzz', Delimiter: '/' },
expectedResult: [],
commonPrefix: [],
isTruncated: false,
nextKeyMarker: undefined,
nextVersionIdMarker: undefined,
},
{
name: 'with maxKeys',
params: { MaxKeys: 3 },
expectedResult: [
objects[4],
objects[5],
objects[8],
],
commonPrefix: [],
isTruncated: true,
nextKeyMarker: objects[8].name,
nextVersionIdMarker: objects[8],
},
{
name: 'with big maxKeys',
params: { MaxKeys: 15000 },
expectedResult: objects,
commonPrefix: [],
isTruncated: false,
nextKeyMarker: undefined,
nextVersionIdMarker: undefined,
},
{
name: 'with delimiter',
params: { Delimiter: '/' },
expectedResult: objects.slice(4, 6),
commonPrefix: ['notes/'],
isTruncated: false,
nextKeyMarker: undefined,
nextVersionIdMarker: undefined,
},
{
name: 'with long delimiter',
params: { Delimiter: 'notes/summer' },
expectedResult: objects.filter(obj =>
obj.name.indexOf('notes/summer') < 0),
commonPrefix: ['notes/summer'],
isTruncated: false,
nextKeyMarker: undefined,
nextVersionIdMarker: undefined,
},
{
name: 'bad key marker and good prefix',
params: {
Delimiter: '/',
Prefix: 'notes/summer/',
KeyMarker: 'notes/summer0',
},
expectedResult: [],
commonPrefix: [],
isTruncated: false,
nextKeyMarker: undefined,
nextVersionIdMarker: undefined,
},
{
name: 'delimiter and prefix (related to #147)',
params: { Delimiter: '/', Prefix: 'notes/' },
expectedResult: [
objects[1],
objects[2],
],
commonPrefix: [
'notes/spring/',
'notes/summer/',
'notes/zaphod/',
],
isTruncated: false,
nextKeyMarker: undefined,
nextVersionIdMarker: undefined,
},
{
name: 'delimiter, prefix and marker (related to #147)',
params: {
Delimiter: '/',
Prefix: 'notes/',
KeyMarker: 'notes/year.txt',
},
expectedResult: [objects[2]],
commonPrefix: ['notes/zaphod/'],
isTruncated: false,
nextKeyMarker: undefined,
nextVersionIdMarker: undefined,
},
{
name: 'all parameters 1/5',
params: {
Delimiter: '/',
Prefix: 'notes/',
KeyMarker: 'notes/',
MaxKeys: 1,
},
expectedResult: [],
commonPrefix: ['notes/spring/'],
isTruncated: true,
nextKeyMarker: 'notes/spring/',
nextVersionIdMarker: undefined,
},
{
name: 'all parameters 2/5',
params: {
Delimiter: '/',
Prefix: 'notes/',
KeyMarker: 'notes/spring/',
MaxKeys: 1,
},
expectedResult: [],
commonPrefix: ['notes/summer/'],
isTruncated: true,
nextKeyMarker: 'notes/summer/',
nextVersionIdMarker: undefined,
},
{
name: 'all parameters 3/5',
params: {
Delimiter: '/',
Prefix: 'notes/',
KeyMarker: 'notes/summer/',
MaxKeys: 1,
},
expectedResult: [objects[1]],
commonPrefix: [],
isTruncated: true,
nextKeyMarker: objects[1].name,
nextVersionIdMarker: objects[1],
},
{
name: 'all parameters 4/5',
params: {
Delimiter: '/',
Prefix: 'notes/',
KeyMarker: 'notes/year.txt',
MaxKeys: 1,
},
expectedResult: [objects[2]],
commonPrefix: [],
isTruncated: true,
nextKeyMarker: objects[2].name,
nextVersionIdMarker: objects[2],
},
{
name: 'all parameters 5/5',
params: {
Delimiter: '/',
Prefix: 'notes/',
KeyMarker: 'notes/yore.rs',
MaxKeys: 1,
},
expectedResult: [],
commonPrefix: ['notes/zaphod/'],
isTruncated: false,
nextKeyMarker: undefined,
nextVersionIdMarker: undefined,
},
].forEach(test => {
it(test.name, done => {
const expectedResult = test.expectedResult;
s3.listObjectVersions(
Object.assign({ Bucket: bucket }, test.params),
(err, res) => {
if (err) {
return done(err);
}
res.Versions.forEach(result => {
const item = expectedResult.find(obj => {
if (obj.name === result.Key &&
obj.versionId === result.VersionId &&
obj.value !== null) {
return true;
}
return false;
});
if (!item) {
throw new Error(
`listing fail, unexpected key ${result.Key} ` +
`with version ${result.VersionId}`);
}
});
res.DeleteMarkers.forEach(result => {
const item = expectedResult.find(obj => {
if (obj.name === result.Key &&
obj.versionId === result.VersionId &&
obj.value === null) {
return true;
}
return false;
});
if (!item) {
throw new Error(
`listing fail, unexpected key ${result.Key} ` +
`with version ${result.VersionId}`);
}
});
res.CommonPrefixes.forEach(cp => {
if (!test.commonPrefix.find(
item => item === cp.Prefix)) {
throw new Error(
`listing fail, unexpected prefix ${cp.Prefix}`);
}
});
assert.strictEqual(res.IsTruncated, test.isTruncated);
assert.strictEqual(res.NextKeyMarker, test.nextKeyMarker);
if (!test.nextVersionIdMarker) {
// eslint-disable-next-line no-param-reassign
test.nextVersionIdMarker = {};
}
assert.strictEqual(res.NextVersionIdMarker,
test.nextVersionIdMarker.versionId);
return done();
});
});
});
});

View File

@ -0,0 +1,137 @@
import assert from 'assert';
import async from 'async';
import BucketUtility from '../../lib/utility/bucket-util';
const bucketName = `multi-object-delete-${Date.now()}`;
const key = 'key';
// Fail the current test if `err` is set, embedding the serialized
// error in the assertion message.
function checkNoError(err) {
    const message = `Expected success, got error ${JSON.stringify(err)}`;
    assert.equal(err, null, message);
}
/**
 * Return the entries of `list` ordered by their `Key` property.
 * Sorts a shallow copy so the caller's array is left untouched
 * (Array.prototype.sort sorts in place; every call site here only uses
 * the return value).
 *
 * @param {object[]} list - entries carrying a string `Key` property
 * @return {object[]} new array sorted ascending by Key
 */
function sortList(list) {
    return [...list].sort((a, b) => {
        if (a.Key > b.Key) {
            return 1;
        }
        if (a.Key < b.Key) {
            return -1;
        }
        return 0;
    });
}
// Skip the whole suite when versioning tests are disabled via env.
const testing = process.env.VERSIONING === 'no' ? describe.skip : describe;
// Batch (multi-object) delete against a version-enabled bucket: every
// entry carries an explicit VersionId, so the versions themselves are
// removed rather than delete markers being created.
testing('Multi-Object Versioning Delete Success', function success() {
    this.timeout(360000);
    let bucketUtil;
    let s3;
    // putObject responses for the 1000 test keys, each with .Key added.
    let objectsRes;
    beforeEach(done => {
        bucketUtil = new BucketUtility('default', {
            signatureVersion: 'v4',
        });
        s3 = bucketUtil.s3;
        async.waterfall([
            next => s3.createBucket({ Bucket: bucketName }, err => next(err)),
            next => s3.putBucketVersioning({
                Bucket: bucketName,
                VersioningConfiguration: {
                    Status: 'Enabled',
                },
            }, err => next(err)),
            next => {
                // Create key1..key1000 with at most 20 puts in flight.
                const objects = [];
                for (let i = 1; i < 1001; i ++) {
                    objects.push(`${key}${i}`);
                }
                async.mapLimit(objects, 20, (key, next) => {
                    s3.putObject({
                        Bucket: bucketName,
                        Key: key,
                        Body: 'somebody',
                    }, (err, res) => {
                        if (err) {
                            return next(err);
                        }
                        // remember which key this response belongs to
                        // eslint-disable-next-line no-param-reassign
                        res.Key = key;
                        return next(null, res);
                    });
                }, (err, results) => {
                    if (err) {
                        return next(err);
                    }
                    objectsRes = results;
                    return next();
                });
            },
        ], err => done(err));
    });
    // Bucket delete succeeds only if each test removed every version it
    // created — TODO confirm all test paths leave the bucket empty.
    afterEach(() => s3.deleteBucketAsync({ Bucket: bucketName }));
    it('should batch delete 1000 objects quietly', () => {
        const objects = objectsRes.slice(0, 1000).map(obj =>
            ({ Key: obj.Key, VersionId: obj.VersionId }));
        return s3.deleteObjectsAsync({
            Bucket: bucketName,
            Delete: {
                Objects: objects,
                Quiet: true,
            },
        }).then(res => {
            // Quiet mode suppresses per-object success entries.
            assert.strictEqual(res.Deleted.length, 0);
            assert.strictEqual(res.Errors.length, 0);
        }).catch(err => {
            // converts an sdk error into an assertion failure
            checkNoError(err);
        });
    });
    it('should batch delete 1000 objects', () => {
        const objects = objectsRes.slice(0, 1000).map(obj =>
            ({ Key: obj.Key, VersionId: obj.VersionId }));
        return s3.deleteObjectsAsync({
            Bucket: bucketName,
            Delete: {
                Objects: objects,
                Quiet: false,
            },
        }).then(res => {
            assert.strictEqual(res.Deleted.length, 1000);
            // order of returned objects not sorted
            assert.deepStrictEqual(sortList(res.Deleted), sortList(objects));
            assert.strictEqual(res.Errors.length, 0);
        }).catch(err => {
            checkNoError(err);
        });
    });
    it('should not send back error if one versionId is invalid', () => {
        const objects = objectsRes.slice(0, 1000).map(obj =>
            ({ Key: obj.Key, VersionId: obj.VersionId }));
        const prevVersion = objects[0].VersionId;
        objects[0].VersionId = 'invalid-version-id';
        return s3.deleteObjectsAsync({
            Bucket: bucketName,
            Delete: {
                Objects: objects,
            },
        }).then(res =>
            // the valid version of objects[0] survived the batch; delete
            // it here so afterEach can delete the bucket
            s3.deleteObjectAsync({
                Bucket: bucketName,
                Key: objects[0].Key,
                VersionId: prevVersion,
            }).then(() => {
                assert.strictEqual(res.Deleted.length, 999);
                assert.strictEqual(res.Errors.length, 1);
                assert.strictEqual(res.Errors[0].Code, 'NoSuchVersion');
            })
        ).catch(err => {
            checkNoError(err);
        });
    });
});

View File

@ -0,0 +1,270 @@
import assert from 'assert';
import { S3 } from 'aws-sdk';
import async from 'async';
import { versioning } from 'arsenal';
import {
removeAllVersions,
constants,
} from '../../lib/utility/versioning-util.js';
import getConfig from '../support/config';
const config = getConfig('default', { signatureVersion: 'v4' });
const s3 = new S3(config);
const counter = 100;
let bucket;
const key = '/';
const fakeId = 'fakeId';
const VID_INF = versioning.VersionID.VID_INF;
const nonExistingId = versioning.VersionID
.encrypt(`${VID_INF.slice(VID_INF.length - 1)}7`);
// Assert `err` is null, tagging the failure message with `desc` so the
// failing operation is identifiable.
function _assertNoError(err, desc) {
    const msg = `Unexpected err ${desc}: ${err}`;
    assert.strictEqual(err, null, msg);
}
// need a wrapper because sdk apparently does not include version id in
// exposed data object for put/get acl methods
//
// Invoke s3[method](params) and pass `callback` a copy of the sdk data
// object augmented with VersionId taken from the x-amz-version-id
// response header.
// NOTE(review): `request` is read inside its own initializer's
// callback; safe while the sdk fires callbacks asynchronously, but a
// synchronously-invoked callback would hit the const TDZ — confirm.
function _wrapDataObject(method, params, callback) {
    const request = s3[method](params, (err, data) => {
        const responseHeaders = request.response.httpResponse.headers;
        if (err) {
            return callback(err);
        }
        // header value wins only as a default; sdk-provided fields in
        // `data` are copied over it
        const dataObj = Object.assign({
            VersionId: responseHeaders['x-amz-version-id'],
        }, data);
        return callback(null, dataObj);
    });
}
// getObjectAcl with VersionId injected from the response headers.
function _getObjectAcl(params, callback) {
    return _wrapDataObject('getObjectAcl', params, callback);
}
// putObjectAcl with VersionId injected from the response headers.
function _putObjectAcl(params, callback) {
    return _wrapDataObject('putObjectAcl', params, callback);
}
// Put a canned ACL on (optionally a specific version of) the test
// object, then read it back, asserting the version ids surfaced in the
// put and get response headers and that exactly two grants exist.
function _putAndGetAcl(cannedAcl, versionId, putResVerId, getResVerId, cb) {
    const aclParams = {
        Bucket: bucket,
        Key: key,
        ACL: cannedAcl,
        VersionId: versionId,
    };
    _putObjectAcl(aclParams, (putErr, putData) => {
        _assertNoError(putErr,
            `putting object acl with version id: ${versionId}`);
        assert.strictEqual(putData.VersionId, putResVerId,
            `expected version id '${putResVerId}' in putacl res headers, ` +
            `got '${putData.VersionId}' instead`);
        // reuse the same params for the get, minus the ACL field
        delete aclParams.ACL;
        _getObjectAcl(aclParams, (getErr, getData) => {
            _assertNoError(getErr,
                `getting object acl with version id: ${versionId}`);
            assert.strictEqual(getData.VersionId, getResVerId,
                `expected version id '${getResVerId}' in getacl res ` +
                `headers, got '${getData.VersionId}' instead`);
            assert.strictEqual(getData.Grants.length, 2);
            cb();
        });
    });
}
// Register the put/get ACL expectations shared by the enabled and
// suspended suites; `versionIds` is populated by each suite's
// beforeEach before these `it` bodies run.
function _testBehaviorVersioningEnabledOrSuspended(versionIds) {
    it('non-version specific put and get ACL should target latest ' +
    'version AND return version ID in response headers', done => {
        const latest = versionIds[versionIds.length - 1];
        _putAndGetAcl('public-read', undefined, latest, latest, done);
    });
    it('version specific put and get ACL should return version ID ' +
    'in response headers', done => {
        const oldest = versionIds[0];
        _putAndGetAcl('public-read', oldest, oldest, oldest, done);
    });
    it('version specific put and get ACL (version id = "null") ' +
    'should return version ID ("null") in response headers', done => {
        _putAndGetAcl('public-read', 'null', 'null', 'null', done);
    });
}
// NOTE(review): both ternary arms are `describe.skip`, so this suite is
// always skipped regardless of process.env.VERSIONING (commit marks it
// WIP); restore `describe` in the second arm to re-enable.
const testing = process.env.VERSIONING === 'no' ? describe.skip : describe.skip;
testing('put and get object acl with versioning', function testSuite() {
this.timeout(600000);
beforeEach(done => {
bucket = `versioning-bucket-acl-${Date.now()}`;
s3.createBucket({ Bucket: bucket }, done);
});
afterEach(done => {
removeAllVersions({ Bucket: bucket }, err => {
if (err) {
return done(err);
}
return s3.deleteBucket({ Bucket: bucket }, done);
});
});
describe('in a bucket without versioning configuration', () => {
beforeEach(done => {
s3.putObject({ Bucket: bucket, Key: key }, done);
});
it('should not return version id for non-version specific ' +
'put and get ACL', done => {
_putAndGetAcl('public-read', undefined, undefined, undefined, done);
});
it('should not return version id for version specific ' +
'put and get ACL (version id = "null")', done => {
_putAndGetAcl('public-read', 'null', undefined, undefined, done);
});
it('should return NoSuchVersion if attempting to put acl for ' +
'non-existing version', done => {
const params = { Bucket: bucket, Key: key, VersionId: nonExistingId,
ACL: 'private' };
s3.putObjectAcl(params, err => {
assert(err, 'Expected err but did not find one');
assert.strictEqual(err.code, 'NoSuchVersion');
assert.strictEqual(err.statusCode, 404);
done();
});
});
it('should return InvalidArgument if attempting to put acl for ' +
'invalid id', done => {
const params = { Bucket: bucket, Key: key, VersionId: fakeId,
ACL: 'private' };
s3.putObjectAcl(params, err => {
assert(err, 'Expected err but did not find one');
assert.strictEqual(err.code, 'InvalidArgument');
assert.strictEqual(err.statusCode, 400);
done();
});
});
it('should return NoSuchVersion if attempting to get acl for ' +
'non-existing version', done => {
const params = { Bucket: bucket, Key: key,
VersionId: nonExistingId };
s3.getObjectAcl(params, err => {
assert(err, 'Expected err but did not find one');
assert.strictEqual(err.code, 'NoSuchVersion');
assert.strictEqual(err.statusCode, 404);
done();
});
});
it('should return InvalidArgument if attempting to get acl for ' +
'invalid id', done => {
const params = { Bucket: bucket, Key: key, VersionId: fakeId };
s3.getObjectAcl(params, err => {
assert(err, 'Expected err but did not find one');
assert.strictEqual(err.code, 'InvalidArgument');
assert.strictEqual(err.statusCode, 400);
done();
});
});
});
describe('on a version-enabled bucket with non-versioned object', () => {
const versionIds = [];
beforeEach(done => {
const params = { Bucket: bucket, Key: key };
async.waterfall([
callback => s3.putObject(params, err => callback(err)),
callback => s3.putBucketVersioning({
Bucket: bucket,
VersioningConfiguration: constants.versioningEnabled,
}, err => callback(err)),
], done);
});
afterEach(done => {
// cleanup versionIds just in case
versionIds.length = 0;
done();
});
describe('before putting new versions', () => {
it('non-version specific put and get ACL should now ' +
'return version ID ("null") in response headers', done => {
_putAndGetAcl('public-read', undefined, 'null', 'null', done);
});
});
describe('after putting new versions', () => {
beforeEach(done => {
const params = { Bucket: bucket, Key: key };
async.timesSeries(counter, (i, next) =>
s3.putObject(params, (err, data) => {
_assertNoError(err, `putting version #${i}`);
versionIds.push(data.VersionId);
next(err);
}), done);
});
_testBehaviorVersioningEnabledOrSuspended(versionIds);
});
});
describe('on version-suspended bucket with non-versioned object', () => {
const versionIds = [];
beforeEach(done => {
const params = { Bucket: bucket, Key: key };
async.waterfall([
callback => s3.putObject(params, err => callback(err)),
callback => s3.putBucketVersioning({
Bucket: bucket,
VersioningConfiguration: constants.versioningSuspended,
}, err => callback(err)),
], done);
});
afterEach(done => {
// cleanup versionIds just in case
versionIds.length = 0;
done();
});
describe('before putting new versions', () => {
it('non-version specific put and get ACL should still ' +
'return version ID ("null") in response headers', done => {
_putAndGetAcl('public-read', undefined, 'null', 'null', done);
});
});
describe('after putting new versions', () => {
beforeEach(done => {
const params = { Bucket: bucket, Key: key };
async.waterfall([
callback => s3.putBucketVersioning({
Bucket: bucket,
VersioningConfiguration: constants.versioningEnabled,
}, err => callback(err)),
callback => async.timesSeries(counter, (i, next) =>
s3.putObject(params, (err, data) => {
_assertNoError(err, `putting version #${i}`);
versionIds.push(data.VersionId);
next(err);
}), err => callback(err)),
callback => s3.putBucketVersioning({
Bucket: bucket,
VersioningConfiguration: constants.versioningSuspended,
}, err => callback(err)),
], done);
});
_testBehaviorVersioningEnabledOrSuspended(versionIds);
});
});
});

File diff suppressed because it is too large Load Diff

View File

@ -0,0 +1,533 @@
import assert from 'assert';
import { S3 } from 'aws-sdk';
import async from 'async';
import getConfig from '../support/config';
const bucket = `versioning-bucket-${Date.now()}`;
const key = 'anObject';
const testing = process.env.VERSIONING === 'no' ? describe.skip : describe;
testing('aws-node-sdk test delete object', function testSuite() {
this.timeout(600000);
let s3 = undefined;
let versionIds = undefined;
// Delete each listed version (or delete marker) from `bucket` in
// parallel; first error wins.
// NOTE(review): third copy of this helper — consider importing
// _deleteVersionList from lib/utility/versioning-util.js.
function _deleteVersionList(versionList, bucket, callback) {
    async.each(versionList, (versionInfo, cb) => {
        const versionId = versionInfo.VersionId;
        const params = { Bucket: bucket, Key: versionInfo.Key,
            VersionId: versionId };
        s3.deleteObject(params, cb);
    }, callback);
}
// List every version and delete marker in `bucket` and delete them all.
// A missing bucket counts as success; any other listing error is passed
// to `callback`.
function _removeAllVersions(bucket, callback) {
    return s3.listObjectVersions({ Bucket: bucket }, (err, data) => {
        if (err) {
            // aws-sdk surfaces the S3 error code on `err.code`; the
            // original checked `err.NoSuchBucket`, a property that never
            // exists on SDK errors, so the branch could not fire
            if (err.code === 'NoSuchBucket') {
                return callback();
            }
            return callback(err);
        }
        // log only on success: `data` is undefined on the error path
        process.stdout.write(
            'list object versions before deletion' +
            `${JSON.stringify(data, undefined, '\t')}`);
        // remove delete markers first, then the real versions
        return _deleteVersionList(data.DeleteMarkers, bucket, err => {
            if (err) {
                return callback(err);
            }
            return _deleteVersionList(data.Versions, bucket, callback);
        });
    });
}
// setup test
before(done => {
    versionIds = [];
    const config = getConfig('default', { signatureVersion: 'v4' });
    s3 = new S3(config);
    s3.createBucket({ Bucket: bucket }, done);
});
// delete bucket after testing
after(done => {
    // TODO: remove conditional after listing is implemented
    if (process.env.AWS_ON_AIR === 'true') {
        return _removeAllVersions(bucket, err => {
            if (err) {
                return done(err);
            }
            return s3.deleteBucket({ Bucket: bucket }, err => {
                assert.strictEqual(err, null,
                    `Error deleting bucket: ${err}`);
                return done();
            });
        });
    }
    return done();
});
// NOTE(review): 'versionned' typo lives in the test title string; left as-is.
it('creating non-versionned object', done => {
    s3.putObject({
        Bucket: bucket,
        Key: key,
    }, (err, res) => {
        if (err) {
            return done(err);
        }
        // no VersionId is returned before versioning is enabled
        assert.equal(res.VersionId, undefined);
        return done();
    });
});
it('enable versioning', done => {
    const params = {
        Bucket: bucket,
        VersioningConfiguration: {
            Status: 'Enabled',
        },
    };
    s3.putBucketVersioning(params, done);
});
it('should not send back error for non-existing key (specific version)',
    done => {
        s3.deleteObject({
            Bucket: bucket,
            Key: `${key}3`,
            VersionId: 'null',
        }, err => {
            if (err) {
                return done(err);
            }
            return done();
        });
    });
it('delete non existent object should create a delete marker', done => {
    s3.deleteObject({
        Bucket: bucket,
        Key: `${key}2`,
    }, (err, res) => {
        if (err) {
            return done(err);
        }
        assert.strictEqual(res.DeleteMarker, 'true');
        assert.notEqual(res.VersionId, undefined);
        // with versioning enabled, a second delete creates a distinct
        // delete marker with a new version id
        return s3.deleteObject({
            Bucket: bucket,
            Key: `${key}2`,
        }, (err, res2) => {
            if (err) {
                return done(err);
            }
            assert.strictEqual(res2.DeleteMarker, 'true');
            assert.notEqual(res2.VersionId, res.VersionId);
            // clean up both markers via version-specific deletes
            return s3.deleteObject({
                Bucket: bucket,
                Key: `${key}2`,
                VersionId: res.VersionId,
            }, err => {
                if (err) {
                    return done(err);
                }
                return s3.deleteObject({
                    Bucket: bucket,
                    Key: `${key}2`,
                    VersionId: res2.VersionId,
                }, err => done(err));
            });
        });
    });
});
it('put a version to the object', done => {
    s3.putObject({
        Bucket: bucket,
        Key: key,
        Body: 'test',
    }, (err, res) => {
        if (err) {
            return done(err);
        }
        // track the implicit 'null' version plus the new real version
        versionIds.push('null');
        versionIds.push(res.VersionId);
        assert.notEqual(res.VersionId, undefined);
        return done();
    });
});
it('should create a delete marker', done => {
    s3.deleteObject({
        Bucket: bucket,
        Key: key,
    }, (err, res) => {
        if (err) {
            return done(err);
        }
        assert.strictEqual(res.DeleteMarker, 'true');
        // the delete marker must get a brand-new version id
        assert.strictEqual(versionIds.find(item => item === res.VersionId),
            undefined);
        versionIds.push(res.VersionId);
        return done();
    });
});
it('should return 404 with a delete marker', done => {
    s3.getObject({
        Bucket: bucket,
        Key: key,
    }, function test(err) {
        if (!err) {
            return done(new Error('should return 404'));
        }
        // non-arrow callback: `this` is the AWS request, exposing headers
        const headers = this.httpResponse.headers;
        assert.strictEqual(headers['x-amz-delete-marker'], 'true');
        return done();
    });
});
it('should delete the null version', done => {
    // versionIds order: 'null', real version, delete marker (see above)
    const version = versionIds.shift();
    s3.deleteObject({
        Bucket: bucket,
        Key: key,
        VersionId: version,
    }, (err, res) => {
        if (err) {
            return done(err);
        }
        assert.strictEqual(res.VersionId, version);
        assert.equal(res.DeleteMarker, undefined);
        return done();
    });
});
it('should delete the versionned object', done => {
    const version = versionIds.shift();
    s3.deleteObject({
        Bucket: bucket,
        Key: key,
        VersionId: version,
    }, (err, res) => {
        if (err) {
            return done(err);
        }
        assert.strictEqual(res.VersionId, version);
        assert.equal(res.DeleteMarker, undefined);
        return done();
    });
});
it('should delete the delete-marker version', done => {
    const version = versionIds.shift();
    s3.deleteObject({
        Bucket: bucket,
        Key: key,
        VersionId: version,
    }, (err, res) => {
        if (err) {
            return done(err);
        }
        assert.strictEqual(res.VersionId, version);
        // deleting a delete marker echoes DeleteMarker: 'true'
        assert.equal(res.DeleteMarker, 'true');
        return done();
    });
});
it('put a new version', done => {
    s3.putObject({
        Bucket: bucket,
        Key: key,
        Body: 'test',
    }, (err, res) => {
        if (err) {
            return done(err);
        }
        versionIds.push(res.VersionId);
        assert.notEqual(res.VersionId, undefined);
        return done();
    });
});
it('get the null version', done => {
    // the null version was deleted earlier, so this must fail
    s3.getObject({
        Bucket: bucket,
        Key: key,
        VersionId: 'null',
    }, err => {
        if (!err || err.code !== 'NoSuchVersion') {
            return done(err || 'should send back an error');
        }
        return done();
    });
});
it('suspending versioning', done => {
    const params = {
        Bucket: bucket,
        VersioningConfiguration: {
            Status: 'Suspended',
        },
    };
    s3.putBucketVersioning(params, done);
});
it('delete non existent object should create a delete marker', done => {
    s3.deleteObject({
        Bucket: bucket,
        Key: `${key}2`,
    }, (err, res) => {
        if (err) {
            return done(err);
        }
        assert.strictEqual(res.DeleteMarker, 'true');
        assert.notEqual(res.VersionId, undefined);
        return s3.deleteObject({
            Bucket: bucket,
            Key: `${key}2`,
        }, (err, res2) => {
            if (err) {
                return done(err);
            }
            assert.strictEqual(res2.DeleteMarker, 'true');
            // while suspended the delete marker is overwritten in place,
            // so the second marker reuses the same version id
            assert.strictEqual(res2.VersionId, res.VersionId);
            return s3.deleteObject({
                Bucket: bucket,
                Key: `${key}2`,
                VersionId: res.VersionId,
            }, err => done(err));
        });
    });
});
it('should put a new delete marker', done => {
    s3.deleteObject({
        Bucket: bucket,
        Key: key,
    }, (err, res) => {
        if (err) {
            return done(err);
        }
        assert.strictEqual(res.DeleteMarker, 'true');
        // in a suspended bucket the delete marker becomes the null version
        assert.strictEqual(res.VersionId, 'null');
        return done();
    });
});
it('enabling versioning', done => {
    const params = {
        Bucket: bucket,
        VersioningConfiguration: {
            Status: 'Enabled',
        },
    };
    s3.putBucketVersioning(params, done);
});
it('should get the null version', done => {
    s3.getObject({
        Bucket: bucket,
        Key: key,
        VersionId: 'null',
    }, function test(err) {
        // non-arrow callback: `this` is the AWS request object
        const headers = this.httpResponse.headers;
        assert.strictEqual(headers['x-amz-delete-marker'], 'true');
        assert.strictEqual(headers['x-amz-version-id'], 'null');
        // getting a delete marker yields 405 MethodNotAllowed
        if (err && err.code !== 'MethodNotAllowed') {
            return done(err);
        } else if (err) {
            return done();
        }
        return done('should return an error');
    });
});
it('put a new version to store the null version', done => {
    s3.putObject({
        Bucket: bucket,
        Key: key,
        Body: 'test',
    }, (err, res) => {
        if (err) {
            return done(err);
        }
        versionIds.push(res.VersionId);
        return done();
    });
});
it('suspending versioning', done => {
    const params = {
        Bucket: bucket,
        VersioningConfiguration: {
            Status: 'Suspended',
        },
    };
    s3.putBucketVersioning(params, done);
});
it('put null version', done => {
    s3.putObject({
        Bucket: bucket,
        Key: key,
        Body: 'test-null-version',
    }, (err, res) => {
        if (err) {
            return done(err);
        }
        // puts while versioning is suspended return no version id
        assert.strictEqual(res.VersionId, undefined);
        return done();
    });
});
it('enabling versioning', done => {
    const params = {
        Bucket: bucket,
        VersioningConfiguration: {
            Status: 'Enabled',
        },
    };
    s3.putBucketVersioning(params, done);
});
it('should get the null version', done => {
    // master version should be the null version just written
    s3.getObject({
        Bucket: bucket,
        Key: key,
    }, (err, res) => {
        if (err) {
            return done(err);
        }
        assert.strictEqual(res.Body.toString(), 'test-null-version');
        return done();
    });
});
it('should add a delete marker', done => {
    s3.deleteObject({
        Bucket: bucket,
        Key: key,
    }, (err, res) => {
        if (err) {
            return done(err);
        }
        assert.strictEqual(res.DeleteMarker, 'true');
        versionIds.push(res.VersionId);
        return done();
    });
});
it('should get the null version', done => {
    // the null version survives behind the delete marker
    s3.getObject({
        Bucket: bucket,
        Key: key,
        VersionId: 'null',
    }, (err, res) => {
        if (err) {
            return done(err);
        }
        assert.strictEqual(res.Body.toString(), 'test-null-version');
        return done();
    });
});
it('should add a delete marker', done => {
    s3.deleteObject({
        Bucket: bucket,
        Key: key,
    }, (err, res) => {
        if (err) {
            return done(err);
        }
        assert.strictEqual(res.DeleteMarker, 'true');
        // each marker gets a fresh version id, never a previously seen one
        assert.strictEqual(versionIds.find(item => item === res.VersionId),
            undefined);
        versionIds.push(res.VersionId);
        return done();
    });
});
it('should set the null version as master', done => {
    // remove the two most recent delete markers; the null version
    // should then become the master version again
    let version = versionIds.pop();
    s3.deleteObject({
        Bucket: bucket,
        Key: key,
        VersionId: version,
    }, (err, res) => {
        if (err) {
            return done(err);
        }
        assert.strictEqual(res.VersionId, version);
        assert.strictEqual(res.DeleteMarker, 'true');
        version = versionIds.pop();
        return s3.deleteObject({
            Bucket: bucket,
            Key: key,
            VersionId: version,
        }, (err, res) => {
            if (err) {
                return done(err);
            }
            assert.strictEqual(res.VersionId, version);
            assert.strictEqual(res.DeleteMarker, 'true');
            return s3.getObject({
                Bucket: bucket,
                Key: key,
            }, (err, res) => {
                if (err) {
                    return done(err);
                }
                assert.strictEqual(res.Body.toString(),
                    'test-null-version');
                return done();
            });
        });
    });
});
it('should delete null version', done => {
    s3.deleteObject({
        Bucket: bucket,
        Key: key,
        VersionId: 'null',
    }, (err, res) => {
        if (err) {
            return done(err);
        }
        assert.strictEqual(res.VersionId, 'null');
        // the most recent remaining version becomes master
        return s3.getObject({
            Bucket: bucket,
            Key: key,
        }, (err, res) => {
            if (err) {
                return done(err);
            }
            assert.strictEqual(res.VersionId,
                versionIds[versionIds.length - 1]);
            return done();
        });
    });
});
it('should be able to delete the bucket', done => {
    // delete every remaining version so the bucket can be removed
    async.eachSeries(versionIds, (id, next) => {
        s3.deleteObject({
            Bucket: bucket,
            Key: key,
            VersionId: id,
        }, (err, res) => {
            if (err) {
                return next(err);
            }
            assert.strictEqual(res.VersionId, id);
            return next();
        });
    }, err => {
        if (err) {
            return done(err);
        }
        return s3.deleteBucket({ Bucket: bucket }, err => done(err));
    });
});
});

View File

@ -0,0 +1,361 @@
import assert from 'assert';
import { S3 } from 'aws-sdk';
import async from 'async';
import getConfig from '../support/config';
const bucket = `versioning-bucket-${Date.now()}`;
const testing = process.env.VERSIONING === 'no' ?
describe.skip : describe;
testing('listObject - Delimiter master', function testSuite() {
this.timeout(600000);
let s3 = undefined;
// Remove each listed version (Key + VersionId) from `bucket` concurrently,
// reporting the first error (if any) to `callback`.
function _deleteVersionList(versionList, bucket, callback) {
    async.each(versionList, (versionInfo, cb) => {
        const params = {
            Bucket: bucket,
            Key: versionInfo.Key,
            VersionId: versionInfo.VersionId,
        };
        s3.deleteObject(params, cb);
    }, callback);
}
// Purge all versions and delete markers from `bucket`. A missing bucket
// counts as success; other listing errors propagate to `callback`.
function _removeAllVersions(bucket, callback) {
    return s3.listObjectVersions({ Bucket: bucket }, (err, data) => {
        if (err) {
            // aws-sdk reports the S3 error code on `err.code`; the
            // original `err.NoSuchBucket` property never exists
            if (err.code === 'NoSuchBucket') {
                return callback();
            }
            return callback(err);
        }
        // delete markers first, then the real versions
        return _deleteVersionList(data.DeleteMarkers, bucket, err => {
            if (err) {
                return callback(err);
            }
            return _deleteVersionList(data.Versions, bucket, callback);
        });
    });
}
// setup test
before(done => {
    const config = getConfig('default', { signatureVersion: 'v4' });
    s3 = new S3(config);
    s3.createBucket({ Bucket: bucket }, done);
});
// delete bucket after testing
after(done => {
    _removeAllVersions(bucket, err => {
        if (err) {
            return done(err);
        }
        return s3.deleteBucket({ Bucket: bucket }, err => {
            assert.strictEqual(err, null,
                `Error deleting bucket: ${err}`);
            return done();
        });
    });
});
// tracks whether bucket versioning is currently enabled while seeding
let versioning = false;
// Fixture objects, written in order. Entries with `isNull: true` are
// written while versioning is suspended (they become null versions);
// a `value` of null means "issue a delete" (creates a delete marker)
// instead of a put.
const objects = [
    { name: 'notes/summer/august/1.txt', value: 'foo', isNull: true },
    { name: 'notes/year.txt', value: 'foo', isNull: true },
    { name: 'notes/yore.rs', value: 'foo', isNull: true },
    { name: 'notes/zaphod/Beeblebrox.txt', value: 'foo', isNull: true },
    { name: 'Pâtisserie=中文-español-English', value: 'foo' },
    { name: 'Pâtisserie=中文-español-English', value: 'bar' },
    { name: 'notes/spring/1.txt', value: 'qux' },
    { name: 'notes/spring/1.txt', value: 'foo' },
    { name: 'notes/spring/1.txt', value: 'bar' },
    { name: 'notes/spring/2.txt', value: 'foo' },
    { name: 'notes/spring/2.txt', value: null },
    { name: 'notes/spring/march/1.txt', value: 'foo' },
    { name: 'notes/spring/march/1.txt', value: 'bar', isNull: true },
    { name: 'notes/summer/1.txt', value: 'foo' },
    { name: 'notes/summer/1.txt', value: 'bar' },
    { name: 'notes/summer/2.txt', value: 'bar' },
    { name: 'notes/summer/4.txt', value: null },
    { name: 'notes/summer/4.txt', value: null },
    { name: 'notes/summer/4.txt', value: null },
    { name: 'notes/summer/444.txt', value: null },
    { name: 'notes/summer/44444.txt', value: null },
];
it('put objects inside bucket', done => {
    async.eachSeries(objects, (obj, next) => {
        async.waterfall([
            next => {
                // toggle bucket versioning to match the fixture's isNull
                // flag before writing (or deleting) the object
                if (!versioning && obj.isNull !== true) {
                    const params = {
                        Bucket: bucket,
                        VersioningConfiguration: {
                            Status: 'Enabled',
                        },
                    };
                    versioning = true;
                    return s3.putBucketVersioning(params, err => next(err));
                } else if (versioning && obj.isNull === true) {
                    const params = {
                        Bucket: bucket,
                        VersioningConfiguration: {
                            Status: 'Suspended',
                        },
                    };
                    versioning = false;
                    return s3.putBucketVersioning(params, err => next(err));
                }
                return next();
            },
            next => {
                if (obj.value === null) {
                    // null value means delete: expect a delete marker
                    return s3.deleteObject({
                        Bucket: bucket,
                        Key: obj.name,
                    }, function test(err) {
                        // non-arrow callback: `this` is the AWS request
                        const headers = this.httpResponse.headers;
                        assert.strictEqual(headers['x-amz-delete-marker'],
                            'true');
                        return next(err);
                    });
                }
                return s3.putObject({
                    Bucket: bucket,
                    Key: obj.name,
                    Body: obj.value,
                }, err => next(err));
            },
        ], err => next(err));
    }, err => done(err));
});
// Table-driven listing scenarios: each case lists the bucket with `params`
// and checks returned keys, common prefixes, truncation and NextMarker.
[
    {
        name: 'basic listing',
        params: {},
        expectedResult: [
            'Pâtisserie=中文-español-English',
            'notes/spring/1.txt',
            'notes/spring/march/1.txt',
            'notes/summer/1.txt',
            'notes/summer/2.txt',
            'notes/summer/august/1.txt',
            'notes/year.txt',
            'notes/yore.rs',
            'notes/zaphod/Beeblebrox.txt',
        ],
        commonPrefix: [],
        isTruncated: false,
        nextMarker: undefined,
    },
    {
        name: 'with valid marker',
        params: { Marker: 'notes/summer/1.txt' },
        expectedResult: [
            'notes/summer/2.txt',
            'notes/summer/august/1.txt',
            'notes/year.txt',
            'notes/yore.rs',
            'notes/zaphod/Beeblebrox.txt',
        ],
        commonPrefix: [],
        isTruncated: false,
        nextMarker: undefined,
    },
    {
        name: 'with bad marker',
        params: { Marker: 'zzzz', Delimiter: '/' },
        expectedResult: [],
        commonPrefix: [],
        isTruncated: false,
        nextMarker: undefined,
    },
    {
        name: 'with maxKeys',
        params: { MaxKeys: 3 },
        expectedResult: [
            'Pâtisserie=中文-español-English',
            'notes/spring/1.txt',
            'notes/spring/march/1.txt',
        ],
        commonPrefix: [],
        isTruncated: true,
        nextMarker: undefined,
    },
    {
        name: 'with big maxKeys',
        params: { MaxKeys: 15000 },
        expectedResult: [
            'Pâtisserie=中文-español-English',
            'notes/spring/1.txt',
            'notes/spring/march/1.txt',
            'notes/summer/1.txt',
            'notes/summer/2.txt',
            'notes/summer/august/1.txt',
            'notes/year.txt',
            'notes/yore.rs',
            'notes/zaphod/Beeblebrox.txt',
        ],
        commonPrefix: [],
        isTruncated: false,
        nextMarker: undefined,
    },
    {
        name: 'with delimiter',
        params: { Delimiter: '/' },
        expectedResult: [
            'Pâtisserie=中文-español-English',
        ],
        commonPrefix: ['notes/'],
        isTruncated: false,
        nextMarker: undefined,
    },
    {
        name: 'with long delimiter',
        params: { Delimiter: 'notes/summer' },
        expectedResult: [
            'Pâtisserie=中文-español-English',
            'notes/spring/1.txt',
            'notes/spring/march/1.txt',
            'notes/year.txt',
            'notes/yore.rs',
            'notes/zaphod/Beeblebrox.txt',
        ],
        commonPrefix: ['notes/summer'],
        isTruncated: false,
        nextMarker: undefined,
    },
    {
        name: 'bad marker and good prefix',
        params: {
            Delimiter: '/',
            Prefix: 'notes/summer/',
            Marker: 'notes/summer0',
        },
        expectedResult: [],
        commonPrefix: [],
        isTruncated: false,
        nextMarker: undefined,
    },
    {
        name: 'delimiter and prefix (related to #147)',
        params: { Delimiter: '/', Prefix: 'notes/' },
        expectedResult: [
            'notes/year.txt',
            'notes/yore.rs',
        ],
        commonPrefix: [
            'notes/spring/',
            'notes/summer/',
            'notes/zaphod/',
        ],
        isTruncated: false,
        nextMarker: undefined,
    },
    {
        name: 'delimiter, prefix and marker (related to #147)',
        params: {
            Delimiter: '/',
            Prefix: 'notes/',
            Marker: 'notes/year.txt',
        },
        expectedResult: ['notes/yore.rs'],
        commonPrefix: ['notes/zaphod/'],
        isTruncated: false,
        nextMarker: undefined,
    },
    // the "all parameters" cases walk the listing one entry at a time,
    // chaining each NextMarker into the next case's Marker
    {
        name: 'all parameters 1/5',
        params: {
            Delimiter: '/',
            Prefix: 'notes/',
            Marker: 'notes/',
            MaxKeys: 1,
        },
        expectedResult: [],
        commonPrefix: ['notes/spring/'],
        isTruncated: true,
        nextMarker: 'notes/spring/',
    },
    {
        name: 'all parameters 2/5',
        params: {
            Delimiter: '/',
            Prefix: 'notes/',
            Marker: 'notes/spring/',
            MaxKeys: 1,
        },
        expectedResult: [],
        commonPrefix: ['notes/summer/'],
        isTruncated: true,
        nextMarker: 'notes/summer/',
    },
    {
        name: 'all parameters 3/5',
        params: {
            Delimiter: '/',
            Prefix: 'notes/',
            Marker: 'notes/summer/',
            MaxKeys: 1,
        },
        expectedResult: ['notes/year.txt'],
        commonPrefix: [],
        isTruncated: true,
        nextMarker: 'notes/year.txt',
    },
    {
        name: 'all parameters 4/5',
        params: {
            Delimiter: '/',
            Prefix: 'notes/',
            Marker: 'notes/year.txt',
            MaxKeys: 1,
        },
        expectedResult: ['notes/yore.rs'],
        commonPrefix: [],
        isTruncated: true,
        nextMarker: 'notes/yore.rs',
    },
    {
        name: 'all parameters 5/5',
        params: {
            Delimiter: '/',
            Prefix: 'notes/',
            Marker: 'notes/yore.rs',
            MaxKeys: 1,
        },
        expectedResult: [],
        commonPrefix: ['notes/zaphod/'],
        isTruncated: false,
        nextMarker: undefined,
    },
].forEach(test => {
    it(test.name, done => {
        const expectedResult = test.expectedResult;
        s3.listObjects(Object.assign({ Bucket: bucket }, test.params),
            (err, res) => {
                if (err) {
                    return done(err);
                }
                // NOTE(review): these throws happen inside the SDK
                // callback; they surface as uncaught exceptions rather
                // than clean assertion failures — consider done(err).
                res.Contents.forEach(result => {
                    if (!expectedResult.find(key => key === result.Key)) {
                        throw new Error(
                            `listing fail, unexpected key ${result.Key}`);
                    }
                });
                res.CommonPrefixes.forEach(cp => {
                    if (!test.commonPrefix.find(
                        item => item === cp.Prefix)) {
                        throw new Error(
                            `listing fail, unexpected prefix ${cp.Prefix}`);
                    }
                });
                assert.strictEqual(res.IsTruncated, test.isTruncated);
                assert.strictEqual(res.NextMarker, test.nextMarker);
                return done();
            });
    });
});
});

View File

@ -0,0 +1,376 @@
import assert from 'assert';
import { S3 } from 'aws-sdk';
import async from 'async';
import {
removeAllVersions,
constants,
} from '../../lib/utility/versioning-util.js';
import getConfig from '../support/config';
const config = getConfig('default', { signatureVersion: 'v4' });
const s3 = new S3(config);
const data = ['foo1', 'foo2'];
const counter = 100;
let bucket;
const key = '/';
// Fail the current test if `err` is set, labelling the failure with `desc`.
function _assertNoError(err, desc) {
    const message = `Unexpected err ${desc}: ${err}`;
    assert.strictEqual(err, null, message);
}
const testing = process.env.VERSIONING === 'no' ? describe.skip : describe;
testing('put and get object with versioning', function testSuite() {
this.timeout(600000);
beforeEach(done => {
    // fresh bucket per test to isolate versioning state
    bucket = `versioning-bucket-${Date.now()}`;
    s3.createBucket({ Bucket: bucket }, done);
});
afterEach(done => {
    // purge all versions so the bucket can be deleted
    removeAllVersions({ Bucket: bucket }, err => {
        if (err) {
            return done(err);
        }
        return s3.deleteBucket({ Bucket: bucket }, done);
    });
});
it('should put and get a non-versioned object without including ' +
'version ids in response headers', done => {
    const params = { Bucket: bucket, Key: key };
    s3.putObject(params, (err, data) => {
        _assertNoError(err, 'putting object');
        assert.strictEqual(data.VersionId, undefined);
        s3.getObject(params, (err, data) => {
            _assertNoError(err, 'getting object');
            assert.strictEqual(data.VersionId, undefined);
            done();
        });
    });
});
it('version-specific get should still not return version id in ' +
'response header', done => {
    const params = { Bucket: bucket, Key: key };
    s3.putObject(params, (err, data) => {
        _assertNoError(err, 'putting object');
        assert.strictEqual(data.VersionId, undefined);
        // 'null' addresses the sole (non-versioned) object version
        params.VersionId = 'null';
        s3.getObject(params, (err, data) => {
            _assertNoError(err, 'getting specific object version "null"');
            assert.strictEqual(data.VersionId, undefined);
            done();
        });
    });
});
describe('on a version-enabled bucket', () => {
    beforeEach(done => {
        s3.putBucketVersioning({
            Bucket: bucket,
            VersioningConfiguration: constants.versioningEnabled,
        }, done);
    });
    it('should create a new version for an object', done => {
        const params = { Bucket: bucket, Key: key };
        s3.putObject(params, (err, data) => {
            _assertNoError(err, 'putting object');
            // fetch by the returned version id and expect it echoed back
            params.VersionId = data.VersionId;
            s3.getObject(params, (err, data) => {
                _assertNoError(err, 'getting object');
                assert.strictEqual(params.VersionId, data.VersionId,
                    'version ids are not equal');
                done();
            });
        });
    });
});
// Bucket that already held a plain object before versioning was enabled:
// the pre-existing object must be preserved as the 'null' version.
describe('on a version-enabled bucket with non-versioned object', () => {
    const eTags = [];
    beforeEach(done => {
        s3.putObject({ Bucket: bucket, Key: key, Body: data[0] },
            (err, data) => {
                if (err) {
                    // return here: without it the hook would dereference
                    // the undefined `data` below and call done twice
                    return done(err);
                }
                eTags.push(data.ETag);
                return s3.putBucketVersioning({
                    Bucket: bucket,
                    VersioningConfiguration: constants.versioningEnabled,
                }, done);
            });
    });
    afterEach(done => {
        // reset eTags
        eTags.length = 0;
        done();
    });
    it('should get null version in versioning enabled bucket',
    done => {
        const paramsNull = { Bucket: bucket, Key: '/', VersionId: 'null' };
        s3.getObject(paramsNull, err => {
            _assertNoError(err, 'getting null version');
            done();
        });
    });
    it('should keep null version and create a new version for an object',
    done => {
        const params = { Bucket: bucket, Key: key, Body: data[1] };
        s3.putObject(params, (err, data) => {
            const newVersion = data.VersionId;
            eTags.push(data.ETag);
            // the new version holds the new data...
            s3.getObject({ Bucket: bucket, Key: key,
                VersionId: newVersion }, (err, data) => {
                assert.strictEqual(err, null);
                assert.strictEqual(data.VersionId, newVersion,
                    'version ids are not equal');
                assert.strictEqual(data.ETag, eTags[1]);
                // ...while the null version still holds the original
                s3.getObject({ Bucket: bucket, Key: key,
                    VersionId: 'null' }, (err, data) => {
                    _assertNoError(err, 'getting null version');
                    assert.strictEqual(data.VersionId, 'null');
                    assert.strictEqual(data.ETag, eTags[0]);
                    done();
                });
            });
        });
    });
    it('should create new versions but still keep nullVersionId',
    done => {
        const versionIds = [];
        const params = { Bucket: bucket, Key: key };
        const paramsNull = { Bucket: bucket, Key: key, VersionId: 'null' };
        // create new versions
        async.timesSeries(counter, (i, next) => s3.putObject(params,
            (err, data) => {
                if (err) {
                    // surface the put error instead of crashing on the
                    // undefined `data.VersionId` below
                    return next(err);
                }
                versionIds.push(data.VersionId);
                // get the 'null' version
                return s3.getObject(paramsNull, (err, nullVerData) => {
                    assert.strictEqual(err, null);
                    assert.strictEqual(nullVerData.ETag, eTags[0]);
                    assert.strictEqual(nullVerData.VersionId, 'null');
                    next(err);
                });
            }), done);
    });
});
describe('on version-suspended bucket', () => {
    beforeEach(done => {
        s3.putBucketVersioning({
            Bucket: bucket,
            VersioningConfiguration: constants.versioningSuspended,
        }, done);
    });
    it('should not return version id for new object', done => {
        const params = { Bucket: bucket, Key: key, Body: 'foo' };
        const paramsNull = { Bucket: bucket, Key: key, VersionId: 'null' };
        s3.putObject(params, (err, data) => {
            const eTag = data.ETag;
            _assertNoError(err, 'putting object');
            assert.strictEqual(data.VersionId, undefined);
            // getting null version should return object we just put
            s3.getObject(paramsNull, (err, nullVerData) => {
                _assertNoError(err, 'getting null version');
                assert.strictEqual(nullVerData.ETag, eTag);
                assert.strictEqual(nullVerData.VersionId, 'null');
                done();
            });
        });
    });
    it('should update null version if put object twice', done => {
        const params = { Bucket: bucket, Key: key };
        const params1 = { Bucket: bucket, Key: key, Body: data[0] };
        const params2 = { Bucket: bucket, Key: key, Body: data[1] };
        const paramsNull = { Bucket: bucket, Key: key, VersionId: 'null' };
        const eTags = [];
        async.waterfall([
            callback => s3.putObject(params1, (err, data) => {
                _assertNoError(err, 'putting first object');
                assert.strictEqual(data.VersionId, undefined);
                eTags.push(data.ETag);
                callback();
            }),
            callback => s3.getObject(params, (err, data) => {
                _assertNoError(err, 'getting master version');
                assert.strictEqual(data.VersionId, 'null');
                assert.strictEqual(data.ETag, eTags[0],
                    'wrong object data');
                callback();
            }),
            // second put overwrites the null version in place
            callback => s3.putObject(params2, (err, data) => {
                _assertNoError(err, 'putting second object');
                assert.strictEqual(data.VersionId, undefined);
                eTags.push(data.ETag);
                callback();
            }),
            callback => s3.getObject(paramsNull, (err, data) => {
                _assertNoError(err, 'getting null version');
                assert.strictEqual(data.VersionId, 'null');
                assert.strictEqual(data.ETag, eTags[1],
                    'wrong object data');
                callback();
            }),
        ], done);
    });
});
// Suspended bucket that already held a plain object: that object is the
// null version and is overwritten in place by further puts.
describe('on a version-suspended bucket with non-versioned object', () => {
    const eTags = [];
    beforeEach(done => {
        s3.putObject({ Bucket: bucket, Key: key, Body: data[0] },
            (err, data) => {
                if (err) {
                    // return here: avoids dereferencing the undefined
                    // `data` below and calling done a second time
                    return done(err);
                }
                eTags.push(data.ETag);
                return s3.putBucketVersioning({
                    Bucket: bucket,
                    VersioningConfiguration: constants.versioningSuspended,
                }, done);
            });
    });
    afterEach(done => {
        // reset eTags
        eTags.length = 0;
        done();
    });
    it('should get null version in versioning suspended bucket',
    done => {
        const paramsNull = { Bucket: bucket, Key: '/', VersionId: 'null' };
        s3.getObject(paramsNull, err => {
            _assertNoError(err, 'getting null version');
            done();
        });
    });
    it('should update null version in versioning suspended bucket',
    done => {
        const params = { Bucket: bucket, Key: key };
        const putParams = { Bucket: bucket, Key: '/', Body: data[1] };
        const paramsNull = { Bucket: bucket, Key: key, VersionId: 'null' };
        async.waterfall([
            callback => s3.getObject(paramsNull, (err, data) => {
                _assertNoError(err, 'getting null version');
                assert.strictEqual(data.VersionId, 'null');
                callback();
            }),
            callback => s3.putObject(putParams, (err, data) => {
                _assertNoError(err, 'putting object');
                assert.strictEqual(data.VersionId, undefined);
                eTags.push(data.ETag);
                callback();
            }),
            // both the explicit null version and the master version must
            // now hold the newly written data
            callback => s3.getObject(paramsNull, (err, data) => {
                _assertNoError(err, 'getting null version');
                assert.strictEqual(data.VersionId, 'null');
                assert.strictEqual(data.ETag, eTags[1],
                    'wrong object data');
                callback();
            }),
            callback => s3.getObject(params, (err, data) => {
                _assertNoError(err, 'getting master version');
                assert.strictEqual(data.VersionId, 'null');
                assert.strictEqual(data.ETag, eTags[1],
                    'wrong object data');
                callback();
            }),
        ], done);
    });
});
// Null version created while suspended, then versioning re-enabled: the
// null version must survive the creation of new real versions.
describe('on versioning suspended then enabled bucket with null version',
() => {
    const eTags = [];
    beforeEach(done => {
        const params = { Bucket: bucket, Key: key, Body: data[0] };
        async.waterfall([
            callback => s3.putBucketVersioning({
                Bucket: bucket,
                VersioningConfiguration: constants.versioningSuspended,
            }, err => callback(err)),
            callback => s3.putObject(params, (err, data) => {
                if (err) {
                    // return here: otherwise `data.ETag` throws and the
                    // waterfall callback fires twice
                    return callback(err);
                }
                eTags.push(data.ETag);
                return callback();
            }),
            callback => s3.putBucketVersioning({
                Bucket: bucket,
                VersioningConfiguration: constants.versioningEnabled,
            }, callback),
        ], done);
    });
    afterEach(done => {
        // reset eTags
        eTags.length = 0;
        done();
    });
    it('should preserve the null version when creating new versions',
    done => {
        const params = { Bucket: bucket, Key: key };
        const paramsNull = { Bucket: bucket, Key: key, VersionId: 'null' };
        async.waterfall([
            callback => s3.getObject(paramsNull, (err, nullVerData) => {
                _assertNoError(err, 'getting null version');
                assert.strictEqual(nullVerData.ETag, eTags[0]);
                assert.strictEqual(nullVerData.VersionId, 'null');
                callback();
            }),
            // pile up `counter` new versions on top of the null version
            callback => async.timesSeries(counter, (i, next) =>
                s3.putObject(params, (err, data) => {
                    _assertNoError(err, `putting object #${i}`);
                    assert.notEqual(data.VersionId, undefined);
                    next();
                }), err => callback(err)),
            callback => s3.getObject(paramsNull, (err, nullVerData) => {
                _assertNoError(err, 'getting null version');
                assert.strictEqual(nullVerData.ETag, eTags[0]);
                callback();
            }),
        ], done);
    });
    it('should create a bunch of objects and their versions', done => {
        const vids = [];
        const keycount = 50;
        const versioncount = 20;
        const value = '{"foo":"bar"}';
        async.times(keycount, (i, next1) => {
            const key = `foo${i}`;
            const params = { Bucket: bucket, Key: key, Body: value };
            async.times(versioncount, (j, next2) =>
                s3.putObject(params, (err, data) => {
                    assert.strictEqual(err, null);
                    assert(data.VersionId, 'invalid versionId');
                    vids.push({ Key: key, VersionId: data.VersionId });
                    next2();
                }), next1);
        }, err => {
            assert.strictEqual(err, null);
            assert.strictEqual(vids.length, keycount * versioncount);
            // TODO use delete marker and check with the result
            process.stdout.write('creating objects done, now deleting...');
            done();
        });
    });
});
});

View File

@ -0,0 +1,130 @@
import assert from 'assert';
import { S3 } from 'aws-sdk';
import async from 'async';
import getConfig from '../support/config';
const bucket = `versioning-bucket-${Date.now()}`;
// Comparator for version records: orders by Key first, then by VersionId,
// using plain `<`/`>` string comparison; equal records compare as 0.
function comp(v1, v2) {
    const byKey = v1.Key > v2.Key ? 1 : v1.Key < v2.Key ? -1 : 0;
    if (byKey !== 0) {
        return byKey;
    }
    if (v1.VersionId > v2.VersionId) {
        return 1;
    }
    return v1.VersionId < v2.VersionId ? -1 : 0;
}
const testing = process.env.VERSIONING === 'no' ? describe.skip : describe;
testing('aws-node-sdk test bucket versioning listing', function testSuite() {
this.timeout(600000);
let s3 = undefined;
const masterVersions = [];
const allVersions = [];
// setup test
before(done => {
    const config = getConfig('default', { signatureVersion: 'v4' });
    s3 = new S3(config);
    s3.createBucket({ Bucket: bucket }, done);
});
// delete bucket after testing
after(done => s3.deleteBucket({ Bucket: bucket }, done));
it('should accept valid versioning configuration', done => {
    const params = {
        Bucket: bucket,
        VersioningConfiguration: {
            Status: 'Enabled',
        },
    };
    s3.putBucketVersioning(params, done);
});
it('should create a bunch of objects and their versions', done => {
    const keycount = 20;
    const versioncount = 20;
    const value = '{"foo":"bar"}';
    async.times(keycount, (i, next1) => {
        const key = `foo${i}`;
        masterVersions.push(key);
        const params = { Bucket: bucket, Key: key, Body: value };
        // `versioncount` puts per key, all recorded in allVersions
        async.times(versioncount, (j, next2) =>
            s3.putObject(params, (err, data) => {
                assert.strictEqual(err, null);
                assert(data.VersionId, 'invalid versionId');
                allVersions.push({ Key: key, VersionId: data.VersionId });
                next2();
            }), next1);
    }, err => {
        assert.strictEqual(err, null);
        assert.strictEqual(allVersions.length, keycount * versioncount);
        done();
    });
});
it('should list all latest versions', done => {
    const params = { Bucket: bucket, MaxKeys: 1000, Delimiter: '/' };
    s3.listObjects(params, (err, data) => {
        const keys = data.Contents.map(entry => entry.Key);
        assert.deepStrictEqual(keys.sort(), masterVersions.sort(),
            'not same keys');
        done();
    });
});
it('should create some delete markers', done => {
    const keycount = 15;
    async.times(keycount, (i, next) => {
        const key = masterVersions[i];
        const params = { Bucket: bucket, Key: key };
        s3.deleteObject(params, (err, data) => {
            assert.strictEqual(err, null);
            assert(data.VersionId, 'invalid versionId');
            allVersions.push({ Key: key, VersionId: data.VersionId });
            next();
        });
    }, done);
});
it('should list all latest versions', done => {
    // the 15 keys hidden by delete markers must no longer be listed
    const params = { Bucket: bucket, MaxKeys: 1000, Delimiter: '/' };
    s3.listObjects(params, (err, data) => {
        const keys = data.Contents.map(entry => entry.Key);
        assert.deepStrictEqual(keys.sort(), masterVersions.sort().slice(15),
            'not same keys');
        done();
    });
});
it('should list all versions', done => {
    const versions = [];
    const params = { Bucket: bucket, MaxKeys: 15, Delimiter: '/' };
    // paginate via async.retry: returning a truthy "error" forces the
    // next attempt with the updated key/version markers
    // NOTE(review): `err` from listObjectVersions is never checked here;
    // a listing failure would surface as a TypeError on `data.Versions`.
    async.retry(100, done => s3.listObjectVersions(params, (err, data) => {
        data.Versions.forEach(version => versions.push({
            Key: version.Key, VersionId: version.VersionId }));
        data.DeleteMarkers.forEach(version => versions.push({
            Key: version.Key, VersionId: version.VersionId }));
        if (data.IsTruncated) {
            params.KeyMarker = data.NextKeyMarker;
            params.VersionIdMarker = data.NextVersionIdMarker;
            return done('not done yet');
        }
        return done();
    }), () => {
        assert.deepStrictEqual(versions.sort(comp), allVersions.sort(comp),
            'not same versions');
        // batch-delete everything so `after` can remove the bucket
        const params = { Bucket: bucket, Delete: { Objects: allVersions } };
        s3.deleteObjects(params, done);
    });
});
});

View File

@ -0,0 +1,341 @@
import assert from 'assert';
import { S3 } from 'aws-sdk';
import async from 'async';
import getConfig from '../support/config';
const bucket = `versioning-bucket-${Date.now()}`;
const testing = process.env.VERSIONING === 'no' ?
describe.skip : describe;
testing('aws-node-sdk test bucket versioning', function testSuite() {
this.timeout(600000);
let s3 = undefined;
const versionIds = [];
const counter = 100;
// setup test
before(done => {
    const config = getConfig('default', { signatureVersion: 'v4' });
    s3 = new S3(config);
    s3.createBucket({ Bucket: bucket }, done);
});
// delete bucket after testing
after(done => s3.deleteBucket({ Bucket: bucket }, done));
it('should not accept empty versioning configuration', done => {
    const params = {
        Bucket: bucket,
        VersioningConfiguration: {},
    };
    s3.putBucketVersioning(params, error => {
        if (error) {
            assert.strictEqual(error.statusCode, 400);
            assert.strictEqual(
                error.code, 'IllegalVersioningConfigurationException');
            done();
        } else {
            done('accepted empty versioning configuration');
        }
    });
});
it('should retrieve an empty versioning configuration', done => {
    // the rejected put must not have changed the bucket's configuration
    const params = { Bucket: bucket };
    s3.getBucketVersioning(params, (error, data) => {
        assert.strictEqual(error, null);
        assert.deepStrictEqual(data, {});
        done();
    });
});
it('should not accept versioning configuration w/o \"Status\"', done => {
    const params = {
        Bucket: bucket,
        VersioningConfiguration: {
            MFADelete: 'Enabled',
        },
    };
    s3.putBucketVersioning(params, error => {
        if (error) {
            assert.strictEqual(error.statusCode, 400);
            assert.strictEqual(
                error.code, 'IllegalVersioningConfigurationException');
            done();
        } else {
            done('accepted empty versioning configuration');
        }
    });
});
it('should retrieve an empty versioning configuration', done => {
    const params = { Bucket: bucket };
    s3.getBucketVersioning(params, (error, data) => {
        assert.strictEqual(error, null);
        assert.deepStrictEqual(data, {});
        done();
    });
});
it('should not accept versioning configuration w/ invalid value', done => {
    const params = {
        Bucket: bucket,
        VersioningConfiguration: {
            MFADelete: 'fun',
            Status: 'let\'s do it',
        },
    };
    s3.putBucketVersioning(params, error => {
        if (error) {
            assert.strictEqual(error.statusCode, 400);
            assert.strictEqual(
                error.code, 'IllegalVersioningConfigurationException');
            done();
        } else {
            done('accepted empty versioning configuration');
        }
    });
});
it('should retrieve an empty versioning configuration', done => {
    const params = { Bucket: bucket };
    s3.getBucketVersioning(params, (error, data) => {
        assert.strictEqual(error, null);
        assert.deepStrictEqual(data, {});
        done();
    });
});
it('should create a non-versioned object', done => {
const params = { Bucket: bucket, Key: '/' };
s3.putObject(params, err => {
assert.strictEqual(err, null);
s3.getObject(params, err => {
assert.strictEqual(err, null);
done();
});
});
});
it('should accept valid versioning configuration', done => {
const params = {
Bucket: bucket,
VersioningConfiguration: {
Status: 'Enabled',
},
};
s3.putBucketVersioning(params, done);
});
it('should retrieve the valid versioning configuration', done => {
const params = { Bucket: bucket };
s3.getBucketVersioning(params, (error, data) => {
assert.strictEqual(error, null);
assert.deepStrictEqual(data, { Status: 'Enabled' });
done();
});
});
it('should create a new version for an object', done => {
const params = { Bucket: bucket, Key: '/' };
s3.putObject(params, (err, data) => {
assert.strictEqual(err, null);
params.VersionId = data.VersionId;
versionIds.push(data.VersionId);
s3.getObject(params, (err, data) => {
assert.strictEqual(err, null);
assert.strictEqual(params.VersionId, data.VersionId,
'version ids are not equal');
// TODO compare the value of null version and the original
// version when find out how to include value in the put
params.VersionId = 'null';
s3.getObject(params, done);
});
});
});
it('should create new versions but still keep nullVersionId', done => {
const params = { Bucket: bucket, Key: '/' };
const paramsNull = { Bucket: bucket, Key: '/', VersionId: 'null' };
let nullVersionId = undefined;
// create new versions
async.timesSeries(counter, (i, next) => s3.putObject(params,
(err, data) => {
versionIds.push(data.VersionId);
// get the 'null' version
s3.getObject(paramsNull, (err, data) => {
assert.strictEqual(err, null);
if (nullVersionId === undefined) {
nullVersionId = data.VersionId;
}
// what to expect: nullVersionId should be the same
assert(nullVersionId, 'nullVersionId should be valid');
assert.strictEqual(nullVersionId, data.VersionId);
next(err);
});
}), done);
});
it('should accept valid versioning configuration', done => {
const params = {
Bucket: bucket,
VersioningConfiguration: {
Status: 'Suspended',
},
};
s3.putBucketVersioning(params, done);
});
it('should retrieve the valid versioning configuration', done => {
const params = { Bucket: bucket };
// s3.getBucketVersioning(params, done);
s3.getBucketVersioning(params, (error, data) => {
assert.strictEqual(error, null);
assert.deepStrictEqual(data, { Status: 'Suspended' });
done();
});
});
it('should update null version in versioning suspended bucket', done => {
const params = { Bucket: bucket, Key: '/' };
const paramsNull = { Bucket: bucket, Key: '/', VersionId: 'null' };
// let nullVersionId = undefined;
// let newNullVersionId = undefined;
async.waterfall([
callback => s3.getObject(paramsNull, err => {
assert.strictEqual(err, null);
// nullVersionId = data.VersionId;
callback();
}),
callback => s3.putObject(params, err => {
assert.strictEqual(err, null);
versionIds.push('null');
callback();
}),
callback => s3.getObject(paramsNull, (err, data) => {
assert.strictEqual(err, null);
assert.strictEqual(data.VersionId, 'null',
'version ids are equal');
callback();
}),
callback => s3.getObject(params, (err, data) => {
assert.strictEqual(err, null);
assert.strictEqual(data.VersionId, 'null',
'version ids are not equal');
callback();
}),
], done);
});
it('should enable versioning and preserve the null version', done => {
const paramsVersioning = {
Bucket: bucket,
VersioningConfiguration: {
Status: 'Enabled',
},
};
const params = { Bucket: bucket, Key: '/' };
const paramsNull = { Bucket: bucket, Key: '/', VersionId: 'null' };
let nullVersionId = undefined;
async.waterfall([
callback => s3.getObject(paramsNull, (err, data) => {
assert.strictEqual(err, null);
nullVersionId = data.VersionId;
callback();
}),
callback => s3.putBucketVersioning(paramsVersioning,
err => callback(err)),
callback => async.timesSeries(counter, (i, next) =>
s3.putObject(params, (err, data) => {
assert.strictEqual(err, null);
versionIds.push(data.VersionId);
next();
}), err => callback(err)),
callback => s3.getObject(paramsNull, (err, data) => {
assert.strictEqual(err, null);
assert.strictEqual(nullVersionId, data.VersionId,
'version ids are not equal');
callback();
}),
], done);
});
it('should create delete marker and keep the null version', done => {
const params = { Bucket: bucket, Key: '/' };
const paramsNull = { Bucket: bucket, Key: '/', VersionId: 'null' };
s3.getObject(paramsNull, (err, data) => {
assert.strictEqual(err, null);
const nullVersionId = data.VersionId;
async.timesSeries(counter, (i, next) => s3.deleteObject(params,
(err, data) => {
assert.strictEqual(err, null);
versionIds.push(data.VersionId);
s3.getObject(params, err => {
assert.strictEqual(err.code, 'NoSuchKey');
next();
});
}), err => {
assert.strictEqual(err, null);
s3.getObject(paramsNull, (err, data) => {
assert.strictEqual(nullVersionId, data.VersionId,
'version ids are not equal');
done();
});
});
});
});
it('should delete latest version and get the next version', done => {
versionIds.reverse();
const params = { Bucket: bucket, Key: '/' };
async.timesSeries(versionIds.length, (i, next) => {
const versionId = versionIds[i];
const nextVersionId = i < versionIds.length ?
versionIds[i + 1] : undefined;
const paramsVersion =
{ Bucket: bucket, Key: '/', VersionId: versionId };
s3.deleteObject(paramsVersion, err => {
assert.strictEqual(err, null);
s3.getObject(params, (err, data) => {
if (err) {
assert(err.code === 'NotFound' ||
err.code === 'NoSuchKey', 'error');
} else {
assert(data.VersionId, 'invalid versionId');
if (nextVersionId !== 'null') {
assert.strictEqual(data.VersionId, nextVersionId);
}
}
next();
});
});
}, done);
});
it('should create a bunch of objects and their versions', done => {
const vids = [];
const keycount = 50;
const versioncount = 20;
const value = '{"foo":"bar"}';
async.times(keycount, (i, next1) => {
const key = `foo${i}`;
const params = { Bucket: bucket, Key: key, Body: value };
async.times(versioncount, (j, next2) =>
s3.putObject(params, (err, data) => {
assert.strictEqual(err, null);
assert(data.VersionId, 'invalid versionId');
vids.push({ Key: key, VersionId: data.VersionId });
next2();
}), next1);
}, err => {
assert.strictEqual(err, null);
assert.strictEqual(vids.length, keycount * versioncount);
const params = { Bucket: bucket, Delete: { Objects: vids } };
// TODO use delete marker and check with the result
process.stdout.write('creating objects done, now deleting...');
s3.deleteObjects(params, done);
});
});
});

View File

@ -8,7 +8,7 @@ const objectKey = 'key';
const itSkipIfAWS = process.env.AWS_ON_AIR ? it.skip : it;
describe('unsupported query requests:', () => {
constants.unsupportedQueries.forEach(query => {
Object.keys(constants.unsupportedQueries).forEach(query => {
itSkipIfAWS(`should respond with NotImplemented for ?${query} request`,
done => {
const queryObj = {};
@ -24,7 +24,7 @@ describe('unsupported query requests:', () => {
itSkipIfAWS('should accept blacklisted query key as a query value ' +
'to a query key that is not on the blacklist', done => {
const queryObj = { test: constants.unsupportedQueries[0] };
const queryObj = { test: Object.keys(constants.unsupportedQueries)[0] };
makeS3Request({ method: 'GET', queryObj, bucket, objectKey }, err => {
assert.strictEqual(err.code, 'NoSuchBucket');
assert.strictEqual(err.statusCode, 404);

View File

@ -11,7 +11,7 @@ import constants from '../../../constants';
import initiateMultipartUpload from '../../../lib/api/initiateMultipartUpload';
import metadata from '../metadataswitch';
import * as metadataMem from '../../../lib/metadata/in_memory/metadata';
import objectPut from '../../../lib/api/objectPut';
import { objectPut } from '../../../lib/api/objectPut';
import objectPutPart from '../../../lib/api/objectPutPart';
import { cleanup, DummyRequestLogger, makeAuthInfo } from '../helpers';
import DummyRequest from '../DummyRequest';

View File

@ -6,7 +6,7 @@ import { parseString } from 'xml2js';
import bucketGet from '../../../lib/api/bucketGet';
import bucketPut from '../../../lib/api/bucketPut';
import objectPut from '../../../lib/api/objectPut';
import { objectPut } from '../../../lib/api/objectPut';
import { cleanup, DummyRequestLogger, makeAuthInfo } from '../helpers';
import DummyRequest from '../DummyRequest';

View File

@ -31,7 +31,7 @@ import objectDelete from '../../../lib/api/objectDelete';
import objectGet from '../../../lib/api/objectGet';
import objectGetACL from '../../../lib/api/objectGetACL';
import objectHead from '../../../lib/api/objectHead';
import objectPut from '../../../lib/api/objectPut';
import { objectPut } from '../../../lib/api/objectPut';
import objectPutACL from '../../../lib/api/objectPutACL';
import objectPutPart from '../../../lib/api/objectPutPart';
import { DummyRequestLogger, makeAuthInfo } from '../helpers';

View File

@ -7,7 +7,7 @@ import { metadata } from '../../../lib/metadata/in_memory/metadata';
import { ds } from '../../../lib/data/in_memory/backend';
import DummyRequest from '../DummyRequest';
import bucketPut from '../../../lib/api/bucketPut';
import objectPut from '../../../lib/api/objectPut';
import { objectPut } from '../../../lib/api/objectPut';
const log = new DummyRequestLogger();
const canonicalID = 'accessKey1';
@ -29,6 +29,11 @@ const testBucketPutRequest = new DummyRequest({
describe('getObjMetadataAndDelete function for multiObjectDelete', () => {
let testPutObjectRequest1;
let testPutObjectRequest2;
const request = new DummyRequest({
headers: {},
parsedContentLength: contentLength,
}, postBody);
const bucket = { getVersioningConfiguration: () => null };
beforeEach(done => {
cleanup();
@ -66,8 +71,8 @@ describe('getObjMetadataAndDelete function for multiObjectDelete', () => {
it('should successfully get object metadata and then ' +
'delete metadata and data', done => {
getObjMetadataAndDelete(bucketName, true,
[], [objectKey1, objectKey2], log,
getObjMetadataAndDelete(authInfo, 'foo', request, bucketName, bucket,
true, [], [{ key: objectKey1 }, { key: objectKey2 }], log,
(err, quietSetting, errorResults, numOfObjects,
successfullyDeleted, totalContentLengthDeleted) => {
assert.ifError(err);
@ -90,8 +95,8 @@ describe('getObjMetadataAndDelete function for multiObjectDelete', () => {
});
it('should return success results if no such key', done => {
getObjMetadataAndDelete(bucketName, true,
[], ['madeup1', 'madeup2'], log,
getObjMetadataAndDelete(authInfo, 'foo', request, bucketName, bucket,
true, [], [{ key: 'madeup1' }, { key: 'madeup2' }], log,
(err, quietSetting, errorResults, numOfObjects,
successfullyDeleted, totalContentLengthDeleted) => {
assert.ifError(err);
@ -114,19 +119,19 @@ describe('getObjMetadataAndDelete function for multiObjectDelete', () => {
// even though the getObjMetadataAndDelete function would
// never be called if there was no bucket (would error out earlier
// in API)
getObjMetadataAndDelete('madeupbucket', true,
[], [objectKey1, objectKey2], log,
getObjMetadataAndDelete(authInfo, 'foo', request, 'madeupbucket',
bucket, true, [], [{ key: objectKey1 }, { key: objectKey2 }], log,
(err, quietSetting, errorResults, numOfObjects,
successfullyDeleted, totalContentLengthDeleted) => {
assert.ifError(err);
assert.strictEqual(quietSetting, true);
assert.deepStrictEqual(errorResults, [
{
key: objectKey1,
entry: { key: objectKey1 },
error: errors.NoSuchBucket,
},
{
key: objectKey2,
entry: { key: objectKey2 },
error: errors.NoSuchBucket,
},
]);
@ -142,8 +147,8 @@ describe('getObjMetadataAndDelete function for multiObjectDelete', () => {
it('should return no error or success results if no objects in play',
done => {
getObjMetadataAndDelete(bucketName, true,
[], [], log,
getObjMetadataAndDelete(authInfo, 'foo', request, bucketName,
bucket, true, [], [], log,
(err, quietSetting, errorResults, numOfObjects,
successfullyDeleted, totalContentLengthDeleted) => {
assert.ifError(err);
@ -167,8 +172,9 @@ describe('getObjMetadataAndDelete function for multiObjectDelete', () => {
error: errors.AccessDenied,
},
];
getObjMetadataAndDelete(bucketName, true,
errorResultsSample, [objectKey1, objectKey2], log,
getObjMetadataAndDelete(authInfo, 'foo', request, bucketName, bucket,
true, errorResultsSample,
[{ key: objectKey1 }, { key: objectKey2 }], log,
(err, quietSetting, errorResults, numOfObjects,
successfullyDeleted, totalContentLengthDeleted) => {
assert.ifError(err);

View File

@ -5,7 +5,7 @@ import bucketPut from '../../../lib/api/bucketPut';
import bucketPutACL from '../../../lib/api/bucketPutACL';
import constants from '../../../constants';
import { cleanup, DummyRequestLogger, makeAuthInfo } from '../helpers';
import objectPut from '../../../lib/api/objectPut';
import { objectPut } from '../../../lib/api/objectPut';
import objectDelete from '../../../lib/api/objectDelete';
import objectGet from '../../../lib/api/objectGet';
import DummyRequest from '../DummyRequest';

View File

@ -8,7 +8,7 @@ import { cleanup, DummyRequestLogger, makeAuthInfo } from '../helpers';
import completeMultipartUpload from '../../../lib/api/completeMultipartUpload';
import DummyRequest from '../DummyRequest';
import initiateMultipartUpload from '../../../lib/api/initiateMultipartUpload';
import objectPut from '../../../lib/api/objectPut';
import { objectPut } from '../../../lib/api/objectPut';
import objectGet from '../../../lib/api/objectGet';
import objectPutPart from '../../../lib/api/objectPutPart';
@ -61,7 +61,7 @@ describe('objectGet API', () => {
log, () => {
objectPut(authInfo, testPutObjectRequest, undefined,
log, (err, result) => {
assert.strictEqual(result, correctMD5);
assert.strictEqual(result.contentMD5, correctMD5);
objectGet(authInfo, testGetRequest,
log, (err, result, responseMetaHeaders) => {
assert.strictEqual(responseMetaHeaders
@ -80,7 +80,7 @@ describe('objectGet API', () => {
log, () => {
objectPut(authInfo, testPutObjectRequest, undefined, log,
(err, result) => {
assert.strictEqual(result, correctMD5);
assert.strictEqual(result.contentMD5, correctMD5);
objectGet(authInfo, testGetRequest, log,
(err, dataGetInfo) => {
assert.deepStrictEqual(dataGetInfo,
@ -226,7 +226,7 @@ describe('objectGet API', () => {
log, () => {
objectPut(authInfo, testPutObjectRequest, undefined, log,
(err, result) => {
assert.strictEqual(result, correctMD5);
assert.strictEqual(result.contentMD5, correctMD5);
objectGet(authInfo, testGetRequest,
log, (err, result, responseMetaHeaders) => {
assert.strictEqual(result, null);

View File

@ -7,7 +7,7 @@ import { parseString } from 'xml2js';
import bucketPut from '../../../lib/api/bucketPut';
import constants from '../../../constants';
import { cleanup, DummyRequestLogger, makeAuthInfo } from '../helpers';
import objectPut from '../../../lib/api/objectPut';
import { objectPut } from '../../../lib/api/objectPut';
import objectGetACL from '../../../lib/api/objectGetACL';
import DummyRequest from '../DummyRequest';
@ -63,7 +63,7 @@ describe('objectGetACL API', () => {
(corsHeaders, next) => objectPut(authInfo, testPutObjectRequest,
undefined, log, next),
(result, corsHeaders, next) => {
assert.strictEqual(result, correctMD5);
assert.strictEqual(result.contentMD5, correctMD5);
objectGetACL(authInfo, testGetACLRequest, log, next);
},
(result, corsHeaders, next) => parseString(result, next),
@ -104,7 +104,7 @@ describe('objectGetACL API', () => {
(corsHeaders, next) => objectPut(authInfo, testPutObjectRequest,
undefined, log, next),
(result, corsHeaders, next) => {
assert.strictEqual(result, correctMD5);
assert.strictEqual(result.contentMD5, correctMD5);
objectGetACL(authInfo, testGetACLRequest, log, next);
},
(result, corsHeaders, next) => parseString(result, next),
@ -141,7 +141,7 @@ describe('objectGetACL API', () => {
(corsHeaders, next) => objectPut(authInfo, testPutObjectRequest,
undefined, log, next),
(result, corsHeaders, next) => {
assert.strictEqual(result, correctMD5);
assert.strictEqual(result.contentMD5, correctMD5);
objectGetACL(authInfo, testGetACLRequest, log, next);
},
(result, corsHeaders, next) => parseString(result, next),
@ -185,7 +185,7 @@ describe('objectGetACL API', () => {
(corsHeaders, next) => objectPut(authInfo, testPutObjectRequest,
undefined, log, next),
(result, corsHeaders, next) => {
assert.strictEqual(result, correctMD5);
assert.strictEqual(result.contentMD5, correctMD5);
objectGetACL(authInfo, testGetACLRequest, log, next);
},
(result, corsHeaders, next) => parseString(result, next),
@ -226,7 +226,7 @@ describe('objectGetACL API', () => {
(corsHeaders, next) => objectPut(
authInfo, testPutObjectRequest, undefined, log, next),
(result, corsHeaders, next) => {
assert.strictEqual(result, correctMD5);
assert.strictEqual(result.contentMD5, correctMD5);
objectGetACL(authInfo, testGetACLRequest, log, next);
},
(result, corsHeaders, next) => parseString(result, next),
@ -266,7 +266,7 @@ describe('objectGetACL API', () => {
(corsHeaders, next) => objectPut(authInfo, testPutObjectRequest,
undefined, log, next),
(result, corsHeaders, next) => {
assert.strictEqual(result, correctMD5);
assert.strictEqual(result.contentMD5, correctMD5);
objectGetACL(authInfo, testGetACLRequest, log, next);
},
(result, corsHeaders, next) => parseString(result, next),
@ -316,7 +316,7 @@ describe('objectGetACL API', () => {
(corsHeaders, next) => objectPut(authInfo, testPutObjectRequest,
undefined, log, next),
(result, corsHeaders, next) => {
assert.strictEqual(result, correctMD5);
assert.strictEqual(result.contentMD5, correctMD5);
objectGetACL(authInfo, testGetACLRequest, log, next);
},
(result, corsHeaders, next) => parseString(result, next),

View File

@ -3,7 +3,7 @@ import assert from 'assert';
import bucketPut from '../../../lib/api/bucketPut';
import { cleanup, DummyRequestLogger, makeAuthInfo } from '../helpers';
import objectPut from '../../../lib/api/objectPut';
import { objectPut } from '../../../lib/api/objectPut';
import objectHead from '../../../lib/api/objectHead';
import DummyRequest from '../DummyRequest';
@ -59,7 +59,7 @@ describe('objectHead API', () => {
log, () => {
objectPut(authInfo, testPutObjectRequest, undefined, log,
(err, result) => {
assert.strictEqual(result, correctMD5);
assert.strictEqual(result.contentMD5, correctMD5);
objectHead(authInfo, testGetRequest, log, err => {
assert.deepStrictEqual(err, errors.NotModified);
done();
@ -82,7 +82,7 @@ describe('objectHead API', () => {
log, () => {
objectPut(authInfo, testPutObjectRequest, undefined, log,
(err, result) => {
assert.strictEqual(result, correctMD5);
assert.strictEqual(result.contentMD5, correctMD5);
objectHead(authInfo, testGetRequest, log, err => {
assert.deepStrictEqual(err,
errors.PreconditionFailed);
@ -107,7 +107,7 @@ describe('objectHead API', () => {
log, () => {
objectPut(authInfo, testPutObjectRequest, undefined, log,
(err, result) => {
assert.strictEqual(result, correctMD5);
assert.strictEqual(result.contentMD5, correctMD5);
objectHead(authInfo, testGetRequest, log, err => {
assert.deepStrictEqual(err,
errors.PreconditionFailed);
@ -132,7 +132,7 @@ describe('objectHead API', () => {
log, () => {
objectPut(authInfo, testPutObjectRequest, undefined, log,
(err, result) => {
assert.strictEqual(result, correctMD5);
assert.strictEqual(result.contentMD5, correctMD5);
objectHead(authInfo, testGetRequest, log, err => {
assert.deepStrictEqual(err, errors.NotModified);
done();
@ -154,7 +154,7 @@ describe('objectHead API', () => {
locationConstraint, log, () => {
objectPut(authInfo, testPutObjectRequest, undefined, log,
(err, result) => {
assert.strictEqual(result, correctMD5);
assert.strictEqual(result.contentMD5, correctMD5);
objectHead(authInfo, testGetRequest, log,
(err, success) => {
assert.strictEqual(success[userMetadataKey],

View File

@ -6,7 +6,7 @@ import bucketPutACL from '../../../lib/api/bucketPutACL';
import { cleanup, DummyRequestLogger, makeAuthInfo } from '../helpers';
import { ds } from '../../../lib/data/in_memory/backend';
import metadata from '../metadataswitch';
import objectPut from '../../../lib/api/objectPut';
import { objectPut } from '../../../lib/api/objectPut';
import DummyRequest from '../DummyRequest';
const log = new DummyRequestLogger();
@ -35,7 +35,7 @@ function testAuth(bucketOwner, authUser, bucketPutReq, log, cb) {
objectPut(authUser, testPutObjectRequest, undefined,
log, (err, res) => {
assert.strictEqual(err, null);
assert.strictEqual(res, correctMD5);
assert.strictEqual(res.contentMD5, correctMD5);
cb();
});
});
@ -112,7 +112,7 @@ describe('objectPut API', () => {
log, () => {
objectPut(authInfo, testPutObjectRequest, undefined, log,
(err, result) => {
assert.strictEqual(result, correctMD5);
assert.strictEqual(result.contentMD5, correctMD5);
metadata.getObjectMD(bucketName, objectName,
{}, log, (err, md) => {
assert(md);
@ -147,7 +147,7 @@ describe('objectPut API', () => {
log, () => {
objectPut(authInfo, testPutObjectRequest, undefined, log,
(err, result) => {
assert.strictEqual(result, correctMD5);
assert.strictEqual(result.contentMD5, correctMD5);
metadata.getObjectMD(bucketName, objectName, {}, log,
(err, md) => {
assert(md);
@ -185,7 +185,7 @@ describe('objectPut API', () => {
log, () => {
objectPut(authInfo, testPutObjectRequest, undefined, log,
(err, result) => {
assert.strictEqual(result, correctMD5);
assert.strictEqual(result.contentMD5, correctMD5);
assert.deepStrictEqual(ds, []);
metadata.getObjectMD(bucketName, objectName, {}, log,
(err, md) => {

View File

@ -9,7 +9,7 @@ import { cleanup,
AccessControlPolicy,
} from '../helpers';
import metadata from '../metadataswitch';
import objectPut from '../../../lib/api/objectPut';
import { objectPut } from '../../../lib/api/objectPut';
import objectPutACL from '../../../lib/api/objectPutACL';
import DummyRequest from '../DummyRequest';
@ -63,7 +63,7 @@ describe('putObjectACL API', () => {
log, () => {
objectPut(authInfo, testPutObjectRequest, undefined, log,
(err, result) => {
assert.strictEqual(result, correctMD5);
assert.strictEqual(result.contentMD5, correctMD5);
objectPutACL(authInfo, testObjACLRequest, log, err => {
assert
.deepStrictEqual(err, errors.InvalidArgument);
@ -87,7 +87,7 @@ describe('putObjectACL API', () => {
log, () => {
objectPut(authInfo, testPutObjectRequest, undefined, log,
(err, result) => {
assert.strictEqual(result, correctMD5);
assert.strictEqual(result.contentMD5, correctMD5);
objectPutACL(authInfo, testObjACLRequest, log, err => {
assert.strictEqual(err, null);
metadata.getObjectMD(bucketName, objectName, {},
@ -125,7 +125,7 @@ describe('putObjectACL API', () => {
log, () => {
objectPut(authInfo, testPutObjectRequest, undefined, log,
(err, result) => {
assert.strictEqual(result, correctMD5);
assert.strictEqual(result.contentMD5, correctMD5);
objectPutACL(authInfo, testObjACLRequest1, log, err => {
assert.strictEqual(err, null);
metadata.getObjectMD(bucketName, objectName, {},
@ -169,7 +169,7 @@ describe('putObjectACL API', () => {
log, () => {
objectPut(authInfo, testPutObjectRequest, undefined, log,
(err, result) => {
assert.strictEqual(result, correctMD5);
assert.strictEqual(result.contentMD5, correctMD5);
objectPutACL(authInfo, testObjACLRequest, log, err => {
assert.strictEqual(err, null);
metadata.getObjectMD(bucketName, objectName, {},
@ -212,7 +212,7 @@ describe('putObjectACL API', () => {
log, () => {
objectPut(authInfo, testPutObjectRequest, undefined, log,
(err, result) => {
assert.strictEqual(result, correctMD5);
assert.strictEqual(result.contentMD5, correctMD5);
objectPutACL(authInfo, testObjACLRequest, log, err => {
assert.strictEqual(err,
errors.UnresolvableGrantByEmailAddress);
@ -244,7 +244,7 @@ describe('putObjectACL API', () => {
log, () => {
objectPut(authInfo, testPutObjectRequest, undefined,
log, (err, result) => {
assert.strictEqual(result, correctMD5);
assert.strictEqual(result.contentMD5, correctMD5);
objectPutACL(authInfo, testObjACLRequest, log, err => {
assert.strictEqual(err, null);
metadata.getObjectMD(bucketName, objectName, {},
@ -310,7 +310,7 @@ describe('putObjectACL API', () => {
log, () => {
objectPut(authInfo, testPutObjectRequest, undefined, log,
(err, result) => {
assert.strictEqual(result, correctMD5);
assert.strictEqual(result.contentMD5, correctMD5);
objectPutACL(authInfo, testObjACLRequest, log, err => {
assert.strictEqual(err, null);
metadata.getObjectMD(bucketName, objectName, {},
@ -350,7 +350,7 @@ describe('putObjectACL API', () => {
log, () => {
objectPut(authInfo, testPutObjectRequest, undefined, log,
(err, result) => {
assert.strictEqual(result, correctMD5);
assert.strictEqual(result.contentMD5, correctMD5);
objectPutACL(authInfo, testObjACLRequest, log, err => {
assert.strictEqual(err,
errors.UnresolvableGrantByEmailAddress);
@ -381,7 +381,7 @@ describe('putObjectACL API', () => {
log, () => {
objectPut(authInfo, testPutObjectRequest, undefined, log,
(err, result) => {
assert.strictEqual(result, correctMD5);
assert.strictEqual(result.contentMD5, correctMD5);
objectPutACL(authInfo, testObjACLRequest, log, err => {
assert.deepStrictEqual(err,
errors.MalformedACLError);
@ -411,7 +411,7 @@ describe('putObjectACL API', () => {
log, () => {
objectPut(authInfo, testPutObjectRequest, undefined, log,
(err, result) => {
assert.strictEqual(result, correctMD5);
assert.strictEqual(result.contentMD5, correctMD5);
objectPutACL(authInfo, testObjACLRequest, log, err => {
assert.deepStrictEqual(err, errors.MalformedXML);
done();
@ -440,7 +440,7 @@ describe('putObjectACL API', () => {
log, () => {
objectPut(authInfo, testPutObjectRequest, undefined, log,
(err, result) => {
assert.strictEqual(result, correctMD5);
assert.strictEqual(result.contentMD5, correctMD5);
objectPutACL(authInfo, testObjACLRequest, log, err => {
assert.deepStrictEqual(err, errors.InvalidArgument);
done();
@ -469,7 +469,7 @@ describe('putObjectACL API', () => {
log, () => {
objectPut(authInfo, testPutObjectRequest, undefined, log,
(err, result) => {
assert.strictEqual(result, correctMD5);
assert.strictEqual(result.contentMD5, correctMD5);
objectPutACL(authInfo, testObjACLRequest, log, err => {
assert.deepStrictEqual(err, errors.InvalidArgument);
done();

View File

@ -31,7 +31,7 @@ import objectDelete from '../../../lib/api/objectDelete';
import objectGet from '../../../lib/api/objectGet';
import objectGetACL from '../../../lib/api/objectGetACL';
import objectHead from '../../../lib/api/objectHead';
import objectPut from '../../../lib/api/objectPut';
import { objectPut } from '../../../lib/api/objectPut';
import objectPutACL from '../../../lib/api/objectPutACL';
import objectPutPart from '../../../lib/api/objectPutPart';
import { DummyRequestLogger, makeAuthInfo } from '../helpers';