Compare commits
25 Commits
developmen
...
exp/foo
Author | SHA1 | Date |
---|---|---|
Vinh Tao | 42c8a7d989 | |
Vinh Tao | fc3926b515 | |
Vinh Tao | 5b08c13739 | |
Vinh Tao | 8c0dfd9329 | |
Vinh Tao | de0bedc436 | |
Vinh Tao | b374c321c9 | |
Vinh Tao | 7ed5e8f3c8 | |
Vinh Tao | 4cdcd34ffa | |
Vinh Tao | 9f450c4530 | |
Vinh Tao | dff694df24 | |
Vinh Tao | ac74957a70 | |
Vinh Tao | b71d22eb51 | |
Vinh Tao | a16a1f3dac | |
Vinh Tao | 50afdafa52 | |
Vinh Tao | a9f4559e17 | |
Vinh Tao | 80b33222db | |
Vinh Tao | 306ca598c5 | |
Vinh Tao | 671506fb9f | |
Vinh Tao | 1986aa23ac | |
Vinh Tao | bbdce5d750 | |
Vinh Tao | 7f69be387b | |
Vinh Tao | 53512eb359 | |
Vinh Tao | 61a3dabe25 | |
Vinh Tao | b2210d3704 | |
Vinh Tao | 259ca69e93 |
|
@ -91,7 +91,7 @@ test:
|
||||||
- S3BACKEND=file S3VAULT=mem MPU_TESTING=yes npm start
|
- S3BACKEND=file S3VAULT=mem MPU_TESTING=yes npm start
|
||||||
> $CIRCLE_ARTIFACTS/server_file_awssdk.txt
|
> $CIRCLE_ARTIFACTS/server_file_awssdk.txt
|
||||||
& bash wait_for_local_port.bash 8000 40
|
& bash wait_for_local_port.bash 8000 40
|
||||||
&& npm run ft_awssdk
|
&& VERSIONING=no npm run ft_awssdk
|
||||||
- S3BACKEND=file S3VAULT=mem npm start
|
- S3BACKEND=file S3VAULT=mem npm start
|
||||||
> $CIRCLE_ARTIFACTS/server_file_s3cmd.txt
|
> $CIRCLE_ARTIFACTS/server_file_s3cmd.txt
|
||||||
& bash wait_for_local_port.bash 8000 40
|
& bash wait_for_local_port.bash 8000 40
|
||||||
|
@ -109,7 +109,7 @@ test:
|
||||||
- S3BACKEND=file S3VAULT=mem MPU_TESTING=yes npm start
|
- S3BACKEND=file S3VAULT=mem MPU_TESTING=yes npm start
|
||||||
> $CIRCLE_ARTIFACTS/server_file_kms_awssdk.txt
|
> $CIRCLE_ARTIFACTS/server_file_kms_awssdk.txt
|
||||||
& bash wait_for_local_port.bash 8000 40
|
& bash wait_for_local_port.bash 8000 40
|
||||||
&& ENABLE_KMS_ENCRYPTION=true npm run ft_awssdk
|
&& VERSIONING=no ENABLE_KMS_ENCRYPTION=true npm run ft_awssdk
|
||||||
- S3BACKEND=file S3VAULT=mem npm start
|
- S3BACKEND=file S3VAULT=mem npm start
|
||||||
> $CIRCLE_ARTIFACTS/server_file_kms_s3cmd.txt
|
> $CIRCLE_ARTIFACTS/server_file_kms_s3cmd.txt
|
||||||
& bash wait_for_local_port.bash 8000 40
|
& bash wait_for_local_port.bash 8000 40
|
||||||
|
|
33
constants.js
33
constants.js
|
@ -68,21 +68,20 @@ export default {
|
||||||
.update('', 'binary').digest('hex'),
|
.update('', 'binary').digest('hex'),
|
||||||
|
|
||||||
// Queries supported by AWS that we do not currently support.
|
// Queries supported by AWS that we do not currently support.
|
||||||
unsupportedQueries: [
|
unsupportedQueries: {
|
||||||
'accelerate',
|
'accelerate': true,
|
||||||
'analytics',
|
'analytics': true,
|
||||||
'inventory',
|
'inventory': true,
|
||||||
'lifecycle',
|
'lifecycle': true,
|
||||||
'list-type',
|
'list-type': true,
|
||||||
'logging',
|
'logging': true,
|
||||||
'metrics',
|
'metrics': true,
|
||||||
'notification',
|
'notification': true,
|
||||||
'policy',
|
'policy': true,
|
||||||
'replication',
|
'replication': true,
|
||||||
'requestPayment',
|
'requestPayment': true,
|
||||||
'restore',
|
'restore': true,
|
||||||
'tagging',
|
'tagging': true,
|
||||||
'torrent',
|
'torrent': true,
|
||||||
'versions',
|
},
|
||||||
],
|
|
||||||
};
|
};
|
||||||
|
|
|
@ -1,6 +1,7 @@
|
||||||
|
import url from 'url';
|
||||||
import querystring from 'querystring';
|
import querystring from 'querystring';
|
||||||
|
|
||||||
import { auth, errors } from 'arsenal';
|
import { auth, errors, versioning } from 'arsenal';
|
||||||
|
|
||||||
import bucketDelete from './bucketDelete';
|
import bucketDelete from './bucketDelete';
|
||||||
import bucketDeleteCors from './bucketDeleteCors';
|
import bucketDeleteCors from './bucketDeleteCors';
|
||||||
|
@ -29,7 +30,7 @@ import objectDelete from './objectDelete';
|
||||||
import objectGet from './objectGet';
|
import objectGet from './objectGet';
|
||||||
import objectGetACL from './objectGetACL';
|
import objectGetACL from './objectGetACL';
|
||||||
import objectHead from './objectHead';
|
import objectHead from './objectHead';
|
||||||
import objectPut from './objectPut';
|
import { objectPut } from './objectPut';
|
||||||
import objectPutACL from './objectPutACL';
|
import objectPutACL from './objectPutACL';
|
||||||
import objectPutPart from './objectPutPart';
|
import objectPutPart from './objectPutPart';
|
||||||
import objectPutCopyPart from './objectPutCopyPart';
|
import objectPutCopyPart from './objectPutCopyPart';
|
||||||
|
@ -40,6 +41,8 @@ import vault from '../auth/vault';
|
||||||
import websiteGet from './websiteGet';
|
import websiteGet from './websiteGet';
|
||||||
import websiteHead from './websiteHead';
|
import websiteHead from './websiteHead';
|
||||||
|
|
||||||
|
const VID = versioning.VersionID;
|
||||||
|
|
||||||
auth.setHandler(vault);
|
auth.setHandler(vault);
|
||||||
|
|
||||||
const api = {
|
const api = {
|
||||||
|
@ -52,9 +55,11 @@ const api = {
|
||||||
}
|
}
|
||||||
let sourceBucket;
|
let sourceBucket;
|
||||||
let sourceObject;
|
let sourceObject;
|
||||||
|
let sourceVersionId = undefined;
|
||||||
if (apiMethod === 'objectCopy' || apiMethod === 'objectPutCopyPart') {
|
if (apiMethod === 'objectCopy' || apiMethod === 'objectPutCopyPart') {
|
||||||
let source =
|
const { pathname, query } =
|
||||||
querystring.unescape(request.headers['x-amz-copy-source']);
|
url.parse(request.headers['x-amz-copy-source']);
|
||||||
|
let source = querystring.unescape(pathname);
|
||||||
// If client sends the source bucket/object with a leading /,
|
// If client sends the source bucket/object with a leading /,
|
||||||
// remove it
|
// remove it
|
||||||
if (source[0] === '/') {
|
if (source[0] === '/') {
|
||||||
|
@ -67,6 +72,17 @@ const api = {
|
||||||
// Pull the source bucket and source object separated by /
|
// Pull the source bucket and source object separated by /
|
||||||
sourceBucket = source.slice(0, slashSeparator);
|
sourceBucket = source.slice(0, slashSeparator);
|
||||||
sourceObject = source.slice(slashSeparator + 1);
|
sourceObject = source.slice(slashSeparator + 1);
|
||||||
|
sourceVersionId = query ?
|
||||||
|
querystring.parse(query).versionId : undefined;
|
||||||
|
sourceVersionId = sourceVersionId || undefined;
|
||||||
|
if (sourceVersionId) {
|
||||||
|
try {
|
||||||
|
sourceVersionId = VID.decrypt(sourceVersionId);
|
||||||
|
} catch (exception) {
|
||||||
|
return callback(errors.InvalidArgument
|
||||||
|
.customizeDescription('Invalid version id specified'));
|
||||||
|
}
|
||||||
|
}
|
||||||
}
|
}
|
||||||
const requestContexts = prepareRequestContexts(apiMethod,
|
const requestContexts = prepareRequestContexts(apiMethod,
|
||||||
request, locationConstraint, sourceBucket, sourceObject);
|
request, locationConstraint, sourceBucket, sourceObject);
|
||||||
|
@ -91,7 +107,7 @@ const api = {
|
||||||
if (apiMethod === 'objectCopy' ||
|
if (apiMethod === 'objectCopy' ||
|
||||||
apiMethod === 'objectPutCopyPart') {
|
apiMethod === 'objectPutCopyPart') {
|
||||||
return this[apiMethod](userInfo, request, sourceBucket,
|
return this[apiMethod](userInfo, request, sourceBucket,
|
||||||
sourceObject, log, callback);
|
sourceObject, sourceVersionId, log, callback);
|
||||||
}
|
}
|
||||||
if (apiMethod === 'objectPut' || apiMethod === 'objectPutPart') {
|
if (apiMethod === 'objectPut' || apiMethod === 'objectPutPart') {
|
||||||
return this[apiMethod](userInfo, request, streamingV4Params,
|
return this[apiMethod](userInfo, request, streamingV4Params,
|
||||||
|
|
|
@ -111,13 +111,17 @@ export function deleteBucket(bucketMD, bucketName, canonicalID, log, cb) {
|
||||||
|
|
||||||
return async.waterfall([
|
return async.waterfall([
|
||||||
function checkForObjectsStep(next) {
|
function checkForObjectsStep(next) {
|
||||||
return metadata.listObject(bucketName, { maxKeys: 1 }, log,
|
const params = { maxKeys: 1, listingType: 'DelimiterVersions' };
|
||||||
(err, objectsListRes) => {
|
return metadata.listObject(bucketName, params, log,
|
||||||
|
(err, list) => {
|
||||||
if (err) {
|
if (err) {
|
||||||
log.error('error from metadata', { error: err });
|
log.error('error from metadata', { error: err });
|
||||||
return next(err);
|
return next(err);
|
||||||
}
|
}
|
||||||
if (objectsListRes.Contents.length) {
|
const length = (list.Versions ? list.Versions.length : 0) +
|
||||||
|
(list.DeleteMarkers ? list.DeleteMarkers.length : 0);
|
||||||
|
log.debug('listing result', { length });
|
||||||
|
if (length) {
|
||||||
log.debug('bucket delete failed',
|
log.debug('bucket delete failed',
|
||||||
{ error: errors.BucketNotEmpty });
|
{ error: errors.BucketNotEmpty });
|
||||||
return next(errors.BucketNotEmpty);
|
return next(errors.BucketNotEmpty);
|
||||||
|
|
|
@ -5,7 +5,9 @@ import services from '../services';
|
||||||
import collectCorsHeaders from '../utilities/collectCorsHeaders';
|
import collectCorsHeaders from '../utilities/collectCorsHeaders';
|
||||||
import escapeForXML from '../utilities/escapeForXML';
|
import escapeForXML from '../utilities/escapeForXML';
|
||||||
import { pushMetric } from '../utapi/utilities';
|
import { pushMetric } from '../utapi/utilities';
|
||||||
import { errors } from 'arsenal';
|
import { errors, versioning } from 'arsenal';
|
||||||
|
|
||||||
|
const VID = versioning.VersionID;
|
||||||
|
|
||||||
// Sample XML response:
|
// Sample XML response:
|
||||||
/* <ListBucketResult xmlns="http://s3.amazonaws.com/doc/2006-03-01/">
|
/* <ListBucketResult xmlns="http://s3.amazonaws.com/doc/2006-03-01/">
|
||||||
|
@ -31,6 +33,126 @@ import { errors } from 'arsenal';
|
||||||
</CommonPrefixes>
|
</CommonPrefixes>
|
||||||
</ListBucketResult>*/
|
</ListBucketResult>*/
|
||||||
|
|
||||||
|
function processVersions(bucketName, listParams, list) {
|
||||||
|
const xml = [];
|
||||||
|
xml.push(
|
||||||
|
'<?xml version="1.0" encoding="UTF-8"?>',
|
||||||
|
'<ListVersionsResult xmlns="http://s3.amazonaws.com/doc/2006-03-01/">',
|
||||||
|
'<Name>', bucketName, '</Name>'
|
||||||
|
);
|
||||||
|
const isTruncated = list.IsTruncated ? 'true' : 'false';
|
||||||
|
const xmlParams = [
|
||||||
|
{ tag: 'Prefix', value: listParams.prefix },
|
||||||
|
{ tag: 'KeyMarker', value: listParams.keyMarker },
|
||||||
|
{ tag: 'VersionIdMarker', value: listParams.versionIdMarker },
|
||||||
|
{ tag: 'NextKeyMarker', value: list.NextKeyMarker },
|
||||||
|
{ tag: 'NextVersionIdMarker', value: list.NextVersionIdMarker },
|
||||||
|
{ tag: 'MaxKeys', value: listParams.maxKeys },
|
||||||
|
{ tag: 'Delimiter', value: listParams.delimiter },
|
||||||
|
{ tag: 'EncodingType', value: listParams.encoding },
|
||||||
|
{ tag: 'IsTruncated', value: isTruncated },
|
||||||
|
];
|
||||||
|
|
||||||
|
const escapeXmlFn = listParams.encoding === 'url' ?
|
||||||
|
querystring.escape : escapeForXML;
|
||||||
|
xmlParams.forEach(p => {
|
||||||
|
if (p.value) {
|
||||||
|
const val = p.tag !== 'NextVersionIdMarker' || p.value === 'null' ?
|
||||||
|
p.value : VID.encrypt(p.value);
|
||||||
|
xml.push(`<${p.tag}>${escapeXmlFn(val)}</${p.tag}>`);
|
||||||
|
}
|
||||||
|
});
|
||||||
|
let lastKey = listParams.keyMarker;
|
||||||
|
list.Versions.forEach(item => {
|
||||||
|
const v = JSON.parse(item.value);
|
||||||
|
const objectKey = escapeXmlFn(item.key);
|
||||||
|
const isLatest = lastKey !== objectKey;
|
||||||
|
lastKey = objectKey;
|
||||||
|
xml.push(
|
||||||
|
v.isDeleteMarker ? '<DeleteMarker>' : '<Version>',
|
||||||
|
`<Key>${objectKey}</Key>`,
|
||||||
|
'<VersionId>',
|
||||||
|
(v.isNull || v.versionId === undefined) ?
|
||||||
|
'null' : VID.encrypt(v.versionId),
|
||||||
|
'</VersionId>',
|
||||||
|
`<IsLatest>${isLatest}</IsLatest>`,
|
||||||
|
`<LastModified>${v['last-modified']}</LastModified>`,
|
||||||
|
`<ETag>"${v['content-md5']}"</ETag>`,
|
||||||
|
`<Size>${v['content-length']}</Size>`,
|
||||||
|
'<Owner>',
|
||||||
|
`<ID>${v['owner-id']}</ID>`,
|
||||||
|
`<DisplayName>${v['owner-display-name']}</DisplayName>`,
|
||||||
|
'</Owner>',
|
||||||
|
`<StorageClass>${v['x-amz-storage-class']}</StorageClass>`,
|
||||||
|
v.isDeleteMarker ? '</DeleteMarker>' : '</Version>'
|
||||||
|
);
|
||||||
|
});
|
||||||
|
list.CommonPrefixes.forEach(item => {
|
||||||
|
const val = escapeXmlFn(item);
|
||||||
|
xml.push(`<CommonPrefixes><Prefix>${val}</Prefix></CommonPrefixes>`);
|
||||||
|
});
|
||||||
|
xml.push('</ListVersionsResult>');
|
||||||
|
return xml.join('');
|
||||||
|
}
|
||||||
|
|
||||||
|
function processMasterVersions(bucketName, listParams, list) {
|
||||||
|
const xml = [];
|
||||||
|
xml.push(
|
||||||
|
'<?xml version="1.0" encoding="UTF-8"?>',
|
||||||
|
'<ListBucketResult xmlns="http://s3.amazonaws.com/doc/2006-03-01/">',
|
||||||
|
'<Name>', bucketName, '</Name>'
|
||||||
|
);
|
||||||
|
const isTruncated = list.IsTruncated ? 'true' : 'false';
|
||||||
|
const xmlParams = [
|
||||||
|
{ tag: 'Prefix', value: listParams.prefix || '' },
|
||||||
|
{ tag: 'Marker', value: listParams.marker || '' },
|
||||||
|
{ tag: 'NextMarker', value: list.NextMarker },
|
||||||
|
{ tag: 'MaxKeys', value: listParams.maxKeys },
|
||||||
|
{ tag: 'Delimiter', value: listParams.delimiter },
|
||||||
|
{ tag: 'EncodingType', value: listParams.encoding },
|
||||||
|
{ tag: 'IsTruncated', value: isTruncated },
|
||||||
|
];
|
||||||
|
|
||||||
|
const escapeXmlFn = listParams.encoding === 'url' ?
|
||||||
|
querystring.escape : escapeForXML;
|
||||||
|
xmlParams.forEach(p => {
|
||||||
|
if (p.value) {
|
||||||
|
xml.push(`<${p.tag}>${escapeXmlFn(p.value)}</${p.tag}>`);
|
||||||
|
} else if (p.tag !== 'NextMarker' &&
|
||||||
|
p.tag !== 'EncodingType' &&
|
||||||
|
p.tag !== 'Delimiter') {
|
||||||
|
xml.push(`<${p.tag}/>`);
|
||||||
|
}
|
||||||
|
});
|
||||||
|
|
||||||
|
list.Contents.forEach(item => {
|
||||||
|
const v = item.value;
|
||||||
|
if (v.isDeleteMarker) {
|
||||||
|
return null;
|
||||||
|
}
|
||||||
|
const objectKey = escapeXmlFn(item.key);
|
||||||
|
return xml.push(
|
||||||
|
'<Contents>',
|
||||||
|
`<Key>${objectKey}</Key>`,
|
||||||
|
`<LastModified>${v.LastModified}</LastModified>`,
|
||||||
|
`<ETag>"${v.ETag}"</ETag>`,
|
||||||
|
`<Size>${v.Size}</Size>`,
|
||||||
|
'<Owner>',
|
||||||
|
`<ID>${v.Owner.ID}</ID>`,
|
||||||
|
`<DisplayName>${v.Owner.DisplayName}</DisplayName>`,
|
||||||
|
'</Owner>',
|
||||||
|
`<StorageClass>${v.StorageClass}</StorageClass>`,
|
||||||
|
'</Contents>'
|
||||||
|
);
|
||||||
|
});
|
||||||
|
list.CommonPrefixes.forEach(item => {
|
||||||
|
const val = escapeXmlFn(item);
|
||||||
|
xml.push(`<CommonPrefixes><Prefix>${val}</Prefix></CommonPrefixes>`);
|
||||||
|
});
|
||||||
|
xml.push('</ListBucketResult>');
|
||||||
|
return xml.join('');
|
||||||
|
}
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* bucketGet - Return list of objects in bucket
|
* bucketGet - Return list of objects in bucket
|
||||||
* @param {AuthInfo} authInfo - Instance of AuthInfo class with
|
* @param {AuthInfo} authInfo - Instance of AuthInfo class with
|
||||||
|
@ -50,7 +172,6 @@ export default function bucketGet(authInfo, request, log, callback) {
|
||||||
return callback(errors.InvalidArgument.customizeDescription('Invalid ' +
|
return callback(errors.InvalidArgument.customizeDescription('Invalid ' +
|
||||||
'Encoding Method specified in Request'));
|
'Encoding Method specified in Request'));
|
||||||
}
|
}
|
||||||
const escapeXmlFn = encoding === 'url' ? querystring.escape : escapeForXML;
|
|
||||||
const requestMaxKeys = params['max-keys'] ?
|
const requestMaxKeys = params['max-keys'] ?
|
||||||
Number.parseInt(params['max-keys'], 10) : 1000;
|
Number.parseInt(params['max-keys'], 10) : 1000;
|
||||||
if (Number.isNaN(requestMaxKeys) || requestMaxKeys < 0) {
|
if (Number.isNaN(requestMaxKeys) || requestMaxKeys < 0) {
|
||||||
|
@ -68,6 +189,7 @@ export default function bucketGet(authInfo, request, log, callback) {
|
||||||
log,
|
log,
|
||||||
};
|
};
|
||||||
const listParams = {
|
const listParams = {
|
||||||
|
listingType: 'Delimiter',
|
||||||
maxKeys: actualMaxKeys,
|
maxKeys: actualMaxKeys,
|
||||||
delimiter: params.delimiter,
|
delimiter: params.delimiter,
|
||||||
marker: params.marker,
|
marker: params.marker,
|
||||||
|
@ -81,70 +203,31 @@ export default function bucketGet(authInfo, request, log, callback) {
|
||||||
log.debug('error processing request', { error: err });
|
log.debug('error processing request', { error: err });
|
||||||
return callback(err, null, corsHeaders);
|
return callback(err, null, corsHeaders);
|
||||||
}
|
}
|
||||||
|
if (params.versions !== undefined) {
|
||||||
|
listParams.listingType = 'DelimiterVersions';
|
||||||
|
delete listParams.marker;
|
||||||
|
listParams.keyMarker = params['key-marker'];
|
||||||
|
listParams.versionIdMarker = params['version-id-marker'] ?
|
||||||
|
VID.decrypt(params['version-id-marker']) : undefined;
|
||||||
|
} else {
|
||||||
|
listParams.listingType = 'DelimiterMaster';
|
||||||
|
}
|
||||||
return services.getObjectListing(bucketName, listParams, log,
|
return services.getObjectListing(bucketName, listParams, log,
|
||||||
(err, list) => {
|
(err, list) => {
|
||||||
if (err) {
|
if (err) {
|
||||||
log.debug('error processing request', { error: err });
|
log.debug('error processing request', { error: err });
|
||||||
return callback(err, null, corsHeaders);
|
return callback(err, null, corsHeaders);
|
||||||
}
|
}
|
||||||
const xml = [];
|
listParams.maxKeys = requestMaxKeys;
|
||||||
xml.push(
|
listParams.encoding = encoding;
|
||||||
'<?xml version="1.0" encoding="UTF-8"?>',
|
let res = undefined;
|
||||||
'<ListBucketResult xmlns="http://s3.amazonaws.com/doc/' +
|
if (listParams.listingType === 'DelimiterVersions') {
|
||||||
'2006-03-01/">',
|
res = processVersions(bucketName, listParams, list);
|
||||||
`<Name>${bucketName}</Name>`
|
} else {
|
||||||
);
|
res = processMasterVersions(bucketName, listParams, list);
|
||||||
const isTruncated = list.IsTruncated ? 'true' : 'false';
|
}
|
||||||
const xmlParams = [
|
pushMetric('listBucket', log, { authInfo, bucket: bucketName });
|
||||||
{ tag: 'Prefix', value: listParams.prefix },
|
return callback(null, res, corsHeaders);
|
||||||
{ tag: 'NextMarker', value: list.NextMarker },
|
|
||||||
{ tag: 'Marker', value: listParams.marker },
|
|
||||||
{ tag: 'MaxKeys', value: requestMaxKeys },
|
|
||||||
{ tag: 'Delimiter', value: listParams.delimiter },
|
|
||||||
{ tag: 'EncodingType', value: encoding },
|
|
||||||
{ tag: 'IsTruncated', value: isTruncated },
|
|
||||||
];
|
|
||||||
|
|
||||||
xmlParams.forEach(p => {
|
|
||||||
if (p.value) {
|
|
||||||
xml.push(`<${p.tag}>${escapeXmlFn(p.value)}</${p.tag}>`);
|
|
||||||
} else if (p.tag !== 'NextMarker' &&
|
|
||||||
p.tag !== 'EncodingType' &&
|
|
||||||
p.tag !== 'Delimiter') {
|
|
||||||
xml.push(`<${p.tag}/>`);
|
|
||||||
}
|
|
||||||
});
|
|
||||||
|
|
||||||
list.Contents.forEach(item => {
|
|
||||||
const v = item.value;
|
|
||||||
const objectKey = escapeXmlFn(item.key);
|
|
||||||
|
|
||||||
xml.push(
|
|
||||||
'<Contents>',
|
|
||||||
`<Key>${objectKey}</Key>`,
|
|
||||||
`<LastModified>${v.LastModified}</LastModified>`,
|
|
||||||
`<ETag>"${v.ETag}"</ETag>`,
|
|
||||||
`<Size>${v.Size}</Size>`,
|
|
||||||
'<Owner>',
|
|
||||||
`<ID>${v.Owner.ID}</ID>`,
|
|
||||||
`<DisplayName>${v.Owner.DisplayName}</DisplayName>`,
|
|
||||||
'</Owner>',
|
|
||||||
`<StorageClass>${v.StorageClass}</StorageClass>`,
|
|
||||||
'</Contents>'
|
|
||||||
);
|
|
||||||
});
|
|
||||||
list.CommonPrefixes.forEach(item => {
|
|
||||||
const val = escapeXmlFn(item);
|
|
||||||
xml.push(
|
|
||||||
`<CommonPrefixes><Prefix>${val}</Prefix></CommonPrefixes>`
|
|
||||||
);
|
|
||||||
});
|
|
||||||
xml.push('</ListBucketResult>');
|
|
||||||
pushMetric('listBucket', log, {
|
|
||||||
authInfo,
|
|
||||||
bucket: bucketName,
|
|
||||||
});
|
|
||||||
return callback(null, xml.join(''), corsHeaders);
|
|
||||||
});
|
});
|
||||||
});
|
});
|
||||||
return undefined;
|
return undefined;
|
||||||
|
|
|
@ -1,4 +1,4 @@
|
||||||
import { errors } from 'arsenal';
|
import { errors, versioning } from 'arsenal';
|
||||||
import async from 'async';
|
import async from 'async';
|
||||||
import crypto from 'crypto';
|
import crypto from 'crypto';
|
||||||
import { parseString } from 'xml2js';
|
import { parseString } from 'xml2js';
|
||||||
|
@ -13,6 +13,8 @@ import services from '../services';
|
||||||
|
|
||||||
import { logger } from '../utilities/logger';
|
import { logger } from '../utilities/logger';
|
||||||
|
|
||||||
|
const VID = versioning.VersionID;
|
||||||
|
|
||||||
/*
|
/*
|
||||||
Format of xml request:
|
Format of xml request:
|
||||||
<CompleteMultipartUpload>
|
<CompleteMultipartUpload>
|
||||||
|
@ -84,6 +86,9 @@ function completeMultipartUpload(authInfo, request, log, callback) {
|
||||||
log.debug('processing request', { method: 'completeMultipartUpload' });
|
log.debug('processing request', { method: 'completeMultipartUpload' });
|
||||||
const bucketName = request.bucketName;
|
const bucketName = request.bucketName;
|
||||||
const objectKey = request.objectKey;
|
const objectKey = request.objectKey;
|
||||||
|
let generatedVersionId = undefined;
|
||||||
|
let bucketMD = undefined;
|
||||||
|
let objectMD = undefined;
|
||||||
const hostname = request.parsedHost;
|
const hostname = request.parsedHost;
|
||||||
const uploadId = request.query.uploadId;
|
const uploadId = request.query.uploadId;
|
||||||
const metadataValParams = {
|
const metadataValParams = {
|
||||||
|
@ -127,6 +132,8 @@ function completeMultipartUpload(authInfo, request, log, callback) {
|
||||||
services.metadataValidateAuthorization(metadataValParams, next);
|
services.metadataValidateAuthorization(metadataValParams, next);
|
||||||
},
|
},
|
||||||
function waterfall2(destBucket, objMD, next) {
|
function waterfall2(destBucket, objMD, next) {
|
||||||
|
bucketMD = destBucket;
|
||||||
|
objectMD = objMD;
|
||||||
services.metadataValidateMultipart(metadataValParams,
|
services.metadataValidateMultipart(metadataValParams,
|
||||||
(err, mpuBucket) => {
|
(err, mpuBucket) => {
|
||||||
if (err) {
|
if (err) {
|
||||||
|
@ -380,22 +387,43 @@ function completeMultipartUpload(authInfo, request, log, callback) {
|
||||||
masterKeyId: destinationBucket.getSseMasterKeyId(),
|
masterKeyId: destinationBucket.getSseMasterKeyId(),
|
||||||
};
|
};
|
||||||
}
|
}
|
||||||
services.metadataStoreObject(destinationBucket.getName(),
|
let versioningOptions = undefined;
|
||||||
dataLocations, pseudoCipherBundle, metaStoreParams, err => {
|
async.waterfall([
|
||||||
if (err) {
|
callback => services.versioningPreprocessing(bucketName,
|
||||||
return next(err, destinationBucket);
|
bucketMD, objectKey, objectMD, null, log, callback),
|
||||||
}
|
(options, callback) => {
|
||||||
if (objMD && objMD.location) {
|
versioningOptions = options;
|
||||||
const dataToDelete = Array.isArray(objMD.location) ?
|
// eslint-disable-next-line
|
||||||
objMD.location : [objMD.location];
|
metaStoreParams.versionId = options.versionId;
|
||||||
data.batchDelete(dataToDelete, logger
|
// eslint-disable-next-line
|
||||||
.newRequestLoggerFromSerializedUids(log
|
metaStoreParams.versioning = options.versioning;
|
||||||
.getSerializedUids()));
|
// eslint-disable-next-line
|
||||||
}
|
metaStoreParams.isNull = options.isNull;
|
||||||
return next(null, mpuBucket, mpuOverviewKey,
|
// eslint-disable-next-line
|
||||||
aggregateETag, storedPartsAsObjects,
|
metaStoreParams.nullVersionId = options.nullVersionId;
|
||||||
extraPartLocations, destinationBucket);
|
services.metadataStoreObject(
|
||||||
});
|
destinationBucket.getName(), dataLocations,
|
||||||
|
pseudoCipherBundle, metaStoreParams, callback);
|
||||||
|
},
|
||||||
|
], (err, res) => {
|
||||||
|
// TODO to include the versioning information in result
|
||||||
|
if (err) {
|
||||||
|
return next(err, destinationBucket);
|
||||||
|
}
|
||||||
|
if (res && res.versionId) {
|
||||||
|
generatedVersionId = res.versionId;
|
||||||
|
}
|
||||||
|
if (objMD && objMD.location && versioningOptions.deleteData) {
|
||||||
|
const dataToDelete = Array.isArray(objMD.location) ?
|
||||||
|
objMD.location : [objMD.location];
|
||||||
|
data.batchDelete(dataToDelete, logger
|
||||||
|
.newRequestLoggerFromSerializedUids(log
|
||||||
|
.getSerializedUids()));
|
||||||
|
}
|
||||||
|
return next(null, mpuBucket, mpuOverviewKey,
|
||||||
|
aggregateETag, storedPartsAsObjects,
|
||||||
|
extraPartLocations, destinationBucket);
|
||||||
|
});
|
||||||
},
|
},
|
||||||
function waterfall8(mpuBucket, mpuOverviewKey, aggregateETag,
|
function waterfall8(mpuBucket, mpuOverviewKey, aggregateETag,
|
||||||
storedPartsAsObjects, extraPartLocations, destinationBucket, next) {
|
storedPartsAsObjects, extraPartLocations, destinationBucket, next) {
|
||||||
|
@ -417,6 +445,9 @@ function completeMultipartUpload(authInfo, request, log, callback) {
|
||||||
if (err) {
|
if (err) {
|
||||||
return callback(err, null, corsHeaders);
|
return callback(err, null, corsHeaders);
|
||||||
}
|
}
|
||||||
|
if (generatedVersionId) {
|
||||||
|
corsHeaders['x-amz-version-id'] = VID.encrypt(generatedVersionId);
|
||||||
|
}
|
||||||
xmlParams.ETag = `"${aggregateETag}"`;
|
xmlParams.ETag = `"${aggregateETag}"`;
|
||||||
const xml = _convertToXml(xmlParams);
|
const xml = _convertToXml(xmlParams);
|
||||||
pushMetric('completeMultipartUpload', log, {
|
pushMetric('completeMultipartUpload', log, {
|
||||||
|
|
|
@ -213,7 +213,7 @@ export default function listMultipartUploads(authInfo,
|
||||||
maxKeys: maxUploads,
|
maxKeys: maxUploads,
|
||||||
prefix: `overview${splitter}${prefix}`,
|
prefix: `overview${splitter}${prefix}`,
|
||||||
queryPrefixLength: prefix.length,
|
queryPrefixLength: prefix.length,
|
||||||
listingType: 'multipartuploads',
|
listingType: 'MPU',
|
||||||
splitter,
|
splitter,
|
||||||
};
|
};
|
||||||
services.getMultipartUploadListing(mpuBucketName, listingParams,
|
services.getMultipartUploadListing(mpuBucketName, listingParams,
|
||||||
|
|
|
@ -1,7 +1,7 @@
|
||||||
import crypto from 'crypto';
|
import crypto from 'crypto';
|
||||||
|
|
||||||
import async from 'async';
|
import async from 'async';
|
||||||
import { auth, errors } from 'arsenal';
|
import { auth, errors, versioning } from 'arsenal';
|
||||||
import { parseString } from 'xml2js';
|
import { parseString } from 'xml2js';
|
||||||
|
|
||||||
import escapeForXML from '../utilities/escapeForXML';
|
import escapeForXML from '../utilities/escapeForXML';
|
||||||
|
@ -12,6 +12,9 @@ import metadata from '../metadata/wrapper';
|
||||||
import services from '../services';
|
import services from '../services';
|
||||||
import vault from '../auth/vault';
|
import vault from '../auth/vault';
|
||||||
import { isBucketAuthorized } from './apiUtils/authorization/aclChecks';
|
import { isBucketAuthorized } from './apiUtils/authorization/aclChecks';
|
||||||
|
import { createAndStoreObject } from './objectPut';
|
||||||
|
|
||||||
|
const VID = versioning.VersionID;
|
||||||
|
|
||||||
|
|
||||||
/*
|
/*
|
||||||
|
@ -50,8 +53,12 @@ import { isBucketAuthorized } from './apiUtils/authorization/aclChecks';
|
||||||
* @param {boolean} quietSetting - true if xml should just include error list
|
* @param {boolean} quietSetting - true if xml should just include error list
|
||||||
* and false if should include deleted list and error list
|
* and false if should include deleted list and error list
|
||||||
* @param {object []} errorResults - list of error result objects with each
|
* @param {object []} errorResults - list of error result objects with each
|
||||||
* object containing -- key: objectName, error: arsenal error
|
* object containing -- entry: { key, versionId }, error: arsenal error
|
||||||
* @param {string []} deleted - list of object keys deleted
|
* @param {object []} deleted - list of object deleted, an object has the format
|
||||||
|
* object: { entry, result, isDeletingDeleteMarker }
|
||||||
|
* object.entry : above
|
||||||
|
* object.result: stringification of { versionId }
|
||||||
|
* object.isDeletingDeleteMarker: name as comment
|
||||||
* @return {string} xml string
|
* @return {string} xml string
|
||||||
*/
|
*/
|
||||||
function _formatXML(quietSetting, errorResults, deleted) {
|
function _formatXML(quietSetting, errorResults, deleted) {
|
||||||
|
@ -59,9 +66,14 @@ function _formatXML(quietSetting, errorResults, deleted) {
|
||||||
errorResults.forEach(errorObj => {
|
errorResults.forEach(errorObj => {
|
||||||
errorXML.push(
|
errorXML.push(
|
||||||
'<Error>',
|
'<Error>',
|
||||||
'<Key>', escapeForXML(errorObj.key), '</Key>',
|
'<Key>', escapeForXML(errorObj.entry.key), '</Key>',
|
||||||
'<Code>', errorObj.error.message, '</Code>',
|
'<Code>', errorObj.error.message, '</Code>');
|
||||||
'<Message>', errorObj.error.description, '</Message>',
|
if (errorObj.entry.versionId) {
|
||||||
|
const version = errorObj.entry.versionId === 'null' ?
|
||||||
|
'null' : escapeForXML(errorObj.entry.versionId);
|
||||||
|
errorXML.push('<VersionId>', version, '</VersionId>');
|
||||||
|
}
|
||||||
|
errorXML.push('<Message>', errorObj.error.description, '</Message>',
|
||||||
'</Error>'
|
'</Error>'
|
||||||
);
|
);
|
||||||
});
|
});
|
||||||
|
@ -79,12 +91,39 @@ function _formatXML(quietSetting, errorResults, deleted) {
|
||||||
return xml.join('');
|
return xml.join('');
|
||||||
}
|
}
|
||||||
const deletedXML = [];
|
const deletedXML = [];
|
||||||
deleted.forEach(objKey => {
|
deleted.forEach(version => {
|
||||||
|
// TODO include isDeletingDeleteMarker in the result
|
||||||
|
const isDeleteMarker = !!version.result;
|
||||||
|
const isDeletingDeleteMarker = version.isDeletingDeleteMarker;
|
||||||
deletedXML.push(
|
deletedXML.push(
|
||||||
'<Deleted>',
|
'<Deleted>',
|
||||||
'<Key>', escapeForXML(objKey), '</Key>',
|
'<Key>',
|
||||||
'</Deleted>'
|
escapeForXML(version.entry.key),
|
||||||
|
'</Key>'
|
||||||
);
|
);
|
||||||
|
if (version.entry.versionId) {
|
||||||
|
deletedXML.push(
|
||||||
|
'<VersionId>',
|
||||||
|
version.entry.versionId === 'null' ?
|
||||||
|
'null' : VID.encrypt(escapeForXML(version.entry.versionId)),
|
||||||
|
'</VersionId>'
|
||||||
|
);
|
||||||
|
}
|
||||||
|
if (isDeleteMarker) {
|
||||||
|
deletedXML.push(
|
||||||
|
'<DeleteMarker>',
|
||||||
|
isDeleteMarker,
|
||||||
|
'</DeleteMarker>'
|
||||||
|
);
|
||||||
|
}
|
||||||
|
if (isDeletingDeleteMarker) {
|
||||||
|
deletedXML.push(
|
||||||
|
'<DeleteMarkerVersionId>',
|
||||||
|
isDeletingDeleteMarker,
|
||||||
|
'</DeleteMarkerVersionId>'
|
||||||
|
);
|
||||||
|
}
|
||||||
|
deletedXML.push('</Deleted>');
|
||||||
});
|
});
|
||||||
xml[2] = deletedXML.join('');
|
xml[2] = deletedXML.join('');
|
||||||
return xml.join('');
|
return xml.join('');
|
||||||
|
@ -92,21 +131,54 @@ function _formatXML(quietSetting, errorResults, deleted) {
|
||||||
|
|
||||||
function _parseXml(xmlToParse, next) {
|
function _parseXml(xmlToParse, next) {
|
||||||
return parseString(xmlToParse, (err, result) => {
|
return parseString(xmlToParse, (err, result) => {
|
||||||
|
let itemError = null;
|
||||||
if (err || !result || !result.Delete) {
|
if (err || !result || !result.Delete) {
|
||||||
return next(errors.MalformedXML);
|
return next(errors.MalformedXML);
|
||||||
}
|
}
|
||||||
const json = result.Delete;
|
const json = result.Delete;
|
||||||
// not quiet is the default if nothing specified
|
// not quiet is the default if nothing specified
|
||||||
const quietSetting = json.Quiet && json.Quiet[0] === 'true';
|
const quietSetting = json.Quiet && json.Quiet[0] === 'true';
|
||||||
// format of json is {"Object":[{"Key":["test1"]},{"Key":["test2"]}]}
|
// format of json is
|
||||||
const objects = json.Object.map(item => item.Key[0]);
|
// {"Object":[
|
||||||
return next(null, quietSetting, objects);
|
// {"Key":["test1"],"VersionId":["vid"]},
|
||||||
|
// {"Key":["test2"]}
|
||||||
|
// ]}
|
||||||
|
const objects = [];
|
||||||
|
const itemErrors = [];
|
||||||
|
for (let i = 0; i < json.Object.length; i++) {
|
||||||
|
const item = json.Object[i];
|
||||||
|
if (!item.Key) {
|
||||||
|
return next(errors.MalformedXML);
|
||||||
|
}
|
||||||
|
const object = { key: item.Key[0] };
|
||||||
|
// TODO check aws behaviour, maybe returning InvalidArgument
|
||||||
|
if (item.VersionId) {
|
||||||
|
try {
|
||||||
|
object.versionId = item.VersionId[0] === 'null' ?
|
||||||
|
'null' : VID.decrypt(item.VersionId[0]);
|
||||||
|
} catch (exception) {
|
||||||
|
itemError = errors.NoSuchVersion;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
if (itemError) {
|
||||||
|
itemErrors.push({ key: item.Key, versionId: item.VersionId,
|
||||||
|
error: itemError });
|
||||||
|
itemError = null;
|
||||||
|
} else {
|
||||||
|
objects.push(object);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return next(null, quietSetting, objects, itemErrors);
|
||||||
});
|
});
|
||||||
}
|
}
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* gets object metadata and deletes object
|
* gets object metadata and deletes object
|
||||||
|
* @param {AuthInfo} authInfo - Instance of AuthInfo class with requester's info
|
||||||
|
* @param {string} canonicalID - canonicalId of requester
|
||||||
|
* @param {object} request - http request
|
||||||
* @param {string} bucketName - bucketName
|
* @param {string} bucketName - bucketName
|
||||||
|
* @param {BucketInfo} bucket - bucket
|
||||||
* @param {boolean} quietSetting - true if xml should just include error list
|
* @param {boolean} quietSetting - true if xml should just include error list
|
||||||
* and false if should include deleted list and error list
|
* and false if should include deleted list and error list
|
||||||
* @param {object []} errorResults - list of error result objects with each
|
* @param {object []} errorResults - list of error result objects with each
|
||||||
|
@ -118,8 +190,8 @@ function _parseXml(xmlToParse, next) {
|
||||||
* @callback called with (err, quietSetting, errorResults, numOfObjects,
|
* @callback called with (err, quietSetting, errorResults, numOfObjects,
|
||||||
* successfullyDeleted, totalContentLengthDeleted)
|
* successfullyDeleted, totalContentLengthDeleted)
|
||||||
*/
|
*/
|
||||||
export function getObjMetadataAndDelete(bucketName, quietSetting,
|
export function getObjMetadataAndDelete(authInfo, canonicalID, request,
|
||||||
errorResults, inPlay, log, next) {
|
bucketName, bucket, quietSetting, errorResults, inPlay, log, next) {
|
||||||
const successfullyDeleted = [];
|
const successfullyDeleted = [];
|
||||||
let totalContentLengthDeleted = 0;
|
let totalContentLengthDeleted = 0;
|
||||||
let numOfObjects = 0;
|
let numOfObjects = 0;
|
||||||
|
@ -128,48 +200,59 @@ export function getObjMetadataAndDelete(bucketName, quietSetting,
|
||||||
|
|
||||||
// doing 5 requests at a time. note that the data wrapper
|
// doing 5 requests at a time. note that the data wrapper
|
||||||
// will do 5 parallel requests to data backend to delete parts
|
// will do 5 parallel requests to data backend to delete parts
|
||||||
return async.forEachLimit(inPlay, 5, (key, moveOn) => {
|
return async.forEachLimit(inPlay, 5, (entry, moveOn) => {
|
||||||
metadata.getObjectMD(bucketName, key, {}, log, (err, objMD) => {
|
const opts = { versionId: entry.versionId };
|
||||||
|
metadata.getObjectMD(bucketName, entry.key, opts, log, (err, objMD) => {
|
||||||
// if general error from metadata return error
|
// if general error from metadata return error
|
||||||
if (err && !err.NoSuchKey) {
|
if (err && !err.NoSuchKey) {
|
||||||
log.error('error getting object MD', { error: err, key });
|
log.error('error getting object MD',
|
||||||
errorResults.push({
|
{ error: err, key: entry.key });
|
||||||
key,
|
errorResults.push({ entry, error: err });
|
||||||
error: err,
|
|
||||||
});
|
|
||||||
return moveOn();
|
return moveOn();
|
||||||
}
|
}
|
||||||
// if particular key does not exist, AWS returns success
|
// if particular key does not exist, AWS returns success
|
||||||
// for key so add to successfullyDeleted list and move on
|
// for key so add to successfullyDeleted list and move on
|
||||||
if (err && err.NoSuchKey) {
|
if (err && err.NoSuchKey) {
|
||||||
successfullyDeleted.push(key);
|
successfullyDeleted.push({ entry });
|
||||||
return moveOn();
|
return moveOn();
|
||||||
}
|
}
|
||||||
return services.deleteObject(bucketName, objMD, key, log,
|
let deleted = false;
|
||||||
err => {
|
return async.waterfall([
|
||||||
if (err) {
|
callback => services.preprocessingVersioningDelete(bucketName,
|
||||||
log.error('error deleting object', { error: err, key });
|
bucket, entry.key, objMD, entry.versionId, log, callback),
|
||||||
errorResults.push({
|
(options, callback) => {
|
||||||
key,
|
if (options && options.deleteData) {
|
||||||
error: err,
|
deleted = true;
|
||||||
});
|
return services.deleteObject(bucketName, objMD,
|
||||||
return moveOn();
|
entry.key, options, log, callback);
|
||||||
}
|
}
|
||||||
if (objMD['content-length']) {
|
request.isDeleteMarker = true; // eslint-disable-line
|
||||||
totalContentLengthDeleted +=
|
// TODO need authInfo and canonicalID
|
||||||
objMD['content-length'];
|
return createAndStoreObject(bucketName, bucket, entry.key,
|
||||||
}
|
objMD, authInfo, canonicalID, null, request, null,
|
||||||
numOfObjects++;
|
log, callback);
|
||||||
successfullyDeleted.push(key);
|
},
|
||||||
|
], (err, res) => {
|
||||||
|
if (err) {
|
||||||
|
log.error('error deleting object', { error: err, entry });
|
||||||
|
errorResults.push({ entry, error: err });
|
||||||
return moveOn();
|
return moveOn();
|
||||||
});
|
}
|
||||||
|
if (deleted && objMD['content-length']) {
|
||||||
|
totalContentLengthDeleted += objMD['content-length'];
|
||||||
|
}
|
||||||
|
numOfObjects++;
|
||||||
|
successfullyDeleted.push({ entry, result: res,
|
||||||
|
isDeletingDeleteMarker: objMD.isDeleteMarker });
|
||||||
|
return moveOn();
|
||||||
|
});
|
||||||
});
|
});
|
||||||
},
|
},
|
||||||
// end of forEach func
|
// end of forEach func
|
||||||
err => {
|
err => {
|
||||||
log.trace('finished deleting objects', { numOfObjects });
|
log.trace('finished deleting objects', { numOfObjects });
|
||||||
return next(err, quietSetting, errorResults, numOfObjects,
|
return next(err, quietSetting, errorResults, numOfObjects,
|
||||||
successfullyDeleted, totalContentLengthDeleted);
|
successfullyDeleted, totalContentLengthDeleted, bucket);
|
||||||
});
|
});
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -205,17 +288,28 @@ function multiObjectDelete(authInfo, request, log, callback) {
|
||||||
|
|
||||||
return async.waterfall([
|
return async.waterfall([
|
||||||
function parseXML(next) {
|
function parseXML(next) {
|
||||||
return _parseXml(request.post, (err, quietSetting, objects) => {
|
return _parseXml(request.post,
|
||||||
if (err || objects.length < 1 || objects.length > 1000) {
|
(err, quietSetting, objects, itemErrors) => {
|
||||||
return next(errors.MalformedXML);
|
const len = objects.length + itemErrors.length;
|
||||||
}
|
if (err || len < 1 || len > 1000) {
|
||||||
return next(null, quietSetting, objects);
|
return next(errors.MalformedXML);
|
||||||
});
|
}
|
||||||
|
return next(null, quietSetting, objects, itemErrors);
|
||||||
|
});
|
||||||
},
|
},
|
||||||
function checkPolicies(quietSetting, objects, next) {
|
function checkPolicies(quietSetting, objects, itemErrors, next) {
|
||||||
// track the error results for any keys with
|
// track the error results for any keys with
|
||||||
// an error response
|
// an error response
|
||||||
const errorResults = [];
|
const errorResults = [];
|
||||||
|
itemErrors.forEach(error => {
|
||||||
|
errorResults.push({
|
||||||
|
entry: {
|
||||||
|
key: error.key,
|
||||||
|
versionId: error.versionId,
|
||||||
|
},
|
||||||
|
error: error.error,
|
||||||
|
});
|
||||||
|
});
|
||||||
// track keys that are still on track to be deleted
|
// track keys that are still on track to be deleted
|
||||||
const inPlay = [];
|
const inPlay = [];
|
||||||
// if request from account, no need to check policies
|
// if request from account, no need to check policies
|
||||||
|
@ -246,16 +340,16 @@ function multiObjectDelete(authInfo, request, log, callback) {
|
||||||
signatureAge: authParams.params.data.signatureAge,
|
signatureAge: authParams.params.data.signatureAge,
|
||||||
},
|
},
|
||||||
parameterize: {
|
parameterize: {
|
||||||
specificResource: objects,
|
specificResource: objects.map(entry => entry.key),
|
||||||
},
|
},
|
||||||
};
|
};
|
||||||
return vault.checkPolicies(requestContextParams, authInfo.getArn(),
|
return vault.checkPolicies(requestContextParams, authInfo.getArn(),
|
||||||
log, (err, authorizationResults) => {
|
log, (err, authorizationResults) => {
|
||||||
// there were no policies so received a blanket AccessDenied
|
// there were no policies so received a blanket AccessDenied
|
||||||
if (err && err.AccessDenied) {
|
if (err && err.AccessDenied) {
|
||||||
objects.forEach(key => {
|
objects.forEach(entry => {
|
||||||
errorResults.push({
|
errorResults.push({
|
||||||
key,
|
entry,
|
||||||
error: errors.AccessDenied });
|
error: errors.AccessDenied });
|
||||||
});
|
});
|
||||||
// send empty array for inPlay
|
// send empty array for inPlay
|
||||||
|
@ -287,12 +381,11 @@ function multiObjectDelete(authInfo, request, log, callback) {
|
||||||
log.error('wrong arn format from vault');
|
log.error('wrong arn format from vault');
|
||||||
return next(errors.InternalError);
|
return next(errors.InternalError);
|
||||||
}
|
}
|
||||||
const key = result.arn.slice(slashIndex + 1);
|
|
||||||
if (result.isAllowed) {
|
if (result.isAllowed) {
|
||||||
inPlay.push(key);
|
inPlay.push(objects[i]);
|
||||||
} else {
|
} else {
|
||||||
errorResults.push({
|
errorResults.push({
|
||||||
key,
|
entry: objects[i],
|
||||||
error: errors.AccessDenied,
|
error: errors.AccessDenied,
|
||||||
});
|
});
|
||||||
}
|
}
|
||||||
|
@ -327,9 +420,9 @@ function multiObjectDelete(authInfo, request, log, callback) {
|
||||||
log.trace("access denied due to bucket acl's");
|
log.trace("access denied due to bucket acl's");
|
||||||
// if access denied at the bucket level, no access for
|
// if access denied at the bucket level, no access for
|
||||||
// any of the objects so all results will be error results
|
// any of the objects so all results will be error results
|
||||||
inPlay.forEach(key => {
|
inPlay.forEach(entry => {
|
||||||
errorResults.push({
|
errorResults.push({
|
||||||
key,
|
entry,
|
||||||
error: errors.AccessDenied,
|
error: errors.AccessDenied,
|
||||||
});
|
});
|
||||||
});
|
});
|
||||||
|
@ -344,14 +437,9 @@ function multiObjectDelete(authInfo, request, log, callback) {
|
||||||
},
|
},
|
||||||
function getObjMetadataAndDeleteStep(quietSetting, errorResults, inPlay,
|
function getObjMetadataAndDeleteStep(quietSetting, errorResults, inPlay,
|
||||||
bucket, next) {
|
bucket, next) {
|
||||||
return getObjMetadataAndDelete(bucketName, quietSetting,
|
return getObjMetadataAndDelete(authInfo, canonicalID, request,
|
||||||
errorResults, inPlay, log, (err, quietSetting, errorResults,
|
bucketName, bucket, quietSetting, errorResults, inPlay,
|
||||||
numOfObjects, successfullyDeleted,
|
log, next);
|
||||||
totalContentLengthDeleted) => {
|
|
||||||
next(err, quietSetting, errorResults,
|
|
||||||
numOfObjects, successfullyDeleted,
|
|
||||||
totalContentLengthDeleted, bucket);
|
|
||||||
});
|
|
||||||
},
|
},
|
||||||
], (err, quietSetting, errorResults, numOfObjects,
|
], (err, quietSetting, errorResults, numOfObjects,
|
||||||
successfullyDeleted, totalContentLengthDeleted, bucket) => {
|
successfullyDeleted, totalContentLengthDeleted, bucket) => {
|
||||||
|
|
|
@ -1,5 +1,5 @@
|
||||||
import async from 'async';
|
import async from 'async';
|
||||||
import { errors } from 'arsenal';
|
import { errors, versioning } from 'arsenal';
|
||||||
|
|
||||||
import collectCorsHeaders from '../utilities/collectCorsHeaders';
|
import collectCorsHeaders from '../utilities/collectCorsHeaders';
|
||||||
import data from '../data/wrapper';
|
import data from '../data/wrapper';
|
||||||
|
@ -11,6 +11,8 @@ import validateHeaders from '../utilities/validateHeaders';
|
||||||
import { pushMetric } from '../utapi/utilities';
|
import { pushMetric } from '../utapi/utilities';
|
||||||
import removeAWSChunked from './apiUtils/object/removeAWSChunked';
|
import removeAWSChunked from './apiUtils/object/removeAWSChunked';
|
||||||
|
|
||||||
|
const VID = versioning.VersionID;
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* Preps metadata to be saved (based on copy or replace request header)
|
* Preps metadata to be saved (based on copy or replace request header)
|
||||||
* @param {object} sourceObjMD - object md of source object
|
* @param {object} sourceObjMD - object md of source object
|
||||||
|
@ -99,14 +101,15 @@ function _prepMetadata(sourceObjMD, headers, sourceIsDestination, authInfo,
|
||||||
* includes normalized headers
|
* includes normalized headers
|
||||||
* @param {string} sourceBucket - name of source bucket for object copy
|
* @param {string} sourceBucket - name of source bucket for object copy
|
||||||
* @param {string} sourceObject - name of source object for object copy
|
* @param {string} sourceObject - name of source object for object copy
|
||||||
|
* @param {string} sourceVersionId - versionId of source object for copy
|
||||||
* @param {object} log - the log request
|
* @param {object} log - the log request
|
||||||
* @param {function} callback - final callback to call with the result
|
* @param {function} callback - final callback to call with the result
|
||||||
* @return {undefined}
|
* @return {undefined}
|
||||||
*/
|
*/
|
||||||
export default
|
export default
|
||||||
function objectCopy(authInfo, request, sourceBucket,
|
function objectCopy(authInfo, request, sourceBucket,
|
||||||
sourceObject, log, callback) {
|
sourceObject, sourceVersionId, log, callback) {
|
||||||
log.debug('processing request', { method: 'objectCopy' });
|
log.info('processing request', { method: 'objectCopy' });
|
||||||
const destBucketName = request.bucketName;
|
const destBucketName = request.bucketName;
|
||||||
const destObjectKey = request.objectKey;
|
const destObjectKey = request.objectKey;
|
||||||
const sourceIsDestination =
|
const sourceIsDestination =
|
||||||
|
@ -115,6 +118,7 @@ function objectCopy(authInfo, request, sourceBucket,
|
||||||
authInfo,
|
authInfo,
|
||||||
bucketName: sourceBucket,
|
bucketName: sourceBucket,
|
||||||
objectKey: sourceObject,
|
objectKey: sourceObject,
|
||||||
|
versionId: sourceVersionId,
|
||||||
requestType: 'objectGet',
|
requestType: 'objectGet',
|
||||||
log,
|
log,
|
||||||
};
|
};
|
||||||
|
@ -169,18 +173,20 @@ function objectCopy(authInfo, request, sourceBucket,
|
||||||
}
|
}
|
||||||
if (!sourceObjMD) {
|
if (!sourceObjMD) {
|
||||||
log.debug('no source object', { sourceObject });
|
log.debug('no source object', { sourceObject });
|
||||||
return next(errors.NoSuchKey, destBucketMD);
|
return next(errors.NoSuchKey, null, destBucketMD);
|
||||||
}
|
}
|
||||||
const headerValResult =
|
const headerValResult =
|
||||||
validateHeaders(sourceObjMD, request.headers);
|
validateHeaders(sourceObjMD, request.headers);
|
||||||
if (headerValResult.error) {
|
if (headerValResult.error) {
|
||||||
return next(errors.PreconditionFailed, destBucketMD);
|
return next(errors.PreconditionFailed, null,
|
||||||
|
destBucketMD);
|
||||||
}
|
}
|
||||||
const storeMetadataParams =
|
const storeMetadataParams =
|
||||||
_prepMetadata(sourceObjMD, request.headers,
|
_prepMetadata(sourceObjMD, request.headers,
|
||||||
sourceIsDestination, authInfo, destObjectKey, log);
|
sourceIsDestination, authInfo, destObjectKey, log);
|
||||||
if (storeMetadataParams.error) {
|
if (storeMetadataParams.error) {
|
||||||
return next(storeMetadataParams.error, destBucketMD);
|
return next(storeMetadataParams.error, null,
|
||||||
|
destBucketMD);
|
||||||
}
|
}
|
||||||
let dataLocator;
|
let dataLocator;
|
||||||
// If 0 byte object just set dataLocator to empty array
|
// If 0 byte object just set dataLocator to empty array
|
||||||
|
@ -209,7 +215,6 @@ function objectCopy(authInfo, request, sourceBucket,
|
||||||
function goGetData(storeMetadataParams, dataLocator, destBucketMD,
|
function goGetData(storeMetadataParams, dataLocator, destBucketMD,
|
||||||
destObjMD, next) {
|
destObjMD, next) {
|
||||||
const serverSideEncryption = destBucketMD.getServerSideEncryption();
|
const serverSideEncryption = destBucketMD.getServerSideEncryption();
|
||||||
|
|
||||||
// skip if source and dest the same or 0 byte object
|
// skip if source and dest the same or 0 byte object
|
||||||
// still send along serverSideEncryption info so algo
|
// still send along serverSideEncryption info so algo
|
||||||
// and masterKeyId stored properly in metadata
|
// and masterKeyId stored properly in metadata
|
||||||
|
@ -285,34 +290,51 @@ function objectCopy(authInfo, request, sourceBucket,
|
||||||
},
|
},
|
||||||
function storeNewMetadata(storeMetadataParams, destDataGetInfoArr,
|
function storeNewMetadata(storeMetadataParams, destDataGetInfoArr,
|
||||||
destObjMD, serverSideEncryption, destBucketMD, next) {
|
destObjMD, serverSideEncryption, destBucketMD, next) {
|
||||||
return services.metadataStoreObject(destBucketName,
|
let versioningOptions = undefined;
|
||||||
destDataGetInfoArr,
|
async.waterfall([
|
||||||
serverSideEncryption, storeMetadataParams, err => {
|
callback => services.versioningPreprocessing(destBucketName,
|
||||||
if (err) {
|
destBucketMD, destObjectKey, destObjMD, null, log,
|
||||||
log.debug('error storing new metadata', { error: err });
|
callback),
|
||||||
return next(err, destBucketMD);
|
(options, callback) => {
|
||||||
}
|
versioningOptions = options;
|
||||||
// Clean up any potential orphans in data if object
|
// eslint-disable-next-line
|
||||||
// put is an overwrite of already existing
|
storeMetadataParams.versionId = options.versionId;
|
||||||
// object with same name
|
// eslint-disable-next-line
|
||||||
// so long as the source is not the same as the destination
|
storeMetadataParams.versioning = options.versioning;
|
||||||
let dataToDelete;
|
// eslint-disable-next-line
|
||||||
if (destObjMD && destObjMD.location &&
|
storeMetadataParams.isNull = options.isNull;
|
||||||
!sourceIsDestination) {
|
// eslint-disable-next-line
|
||||||
dataToDelete = Array.isArray(destObjMD.location) ?
|
storeMetadataParams.nullVersionId = options.nullVersionId;
|
||||||
destObjMD.location : [destObjMD.location];
|
services.metadataStoreObject(destBucketName,
|
||||||
data.batchDelete(dataToDelete,
|
destDataGetInfoArr, serverSideEncryption,
|
||||||
|
storeMetadataParams, callback);
|
||||||
|
},
|
||||||
|
], (err, res) => {
|
||||||
|
if (err) {
|
||||||
|
log.debug('error storing new metadata', { error: err });
|
||||||
|
return next(err, destBucketMD);
|
||||||
|
}
|
||||||
|
// Clean up any potential orphans in data if object
|
||||||
|
// put is an overwrite of already existing
|
||||||
|
// object with same name
|
||||||
|
// so long as the source is not the same as the destination
|
||||||
|
let dataToDelete = undefined;
|
||||||
|
if (destObjMD && destObjMD.location &&
|
||||||
|
!sourceIsDestination && versioningOptions.deleteData) {
|
||||||
|
dataToDelete = Array.isArray(destObjMD.location) ?
|
||||||
|
destObjMD.location : [destObjMD.location];
|
||||||
|
data.batchDelete(dataToDelete,
|
||||||
logger.newRequestLoggerFromSerializedUids(
|
logger.newRequestLoggerFromSerializedUids(
|
||||||
log.getSerializedUids()));
|
log.getSerializedUids()));
|
||||||
}
|
}
|
||||||
const sourceObjSize = storeMetadataParams.size;
|
const sourceObjSize = storeMetadataParams.size;
|
||||||
const destObjPrevSize = destObjMD ?
|
const destObjPrevSize = destObjMD ?
|
||||||
destObjMD['content-length'] : null;
|
destObjMD['content-length'] : null;
|
||||||
return next(null, destBucketMD, storeMetadataParams,
|
return next(null, res, destBucketMD, storeMetadataParams,
|
||||||
serverSideEncryption, sourceObjSize, destObjPrevSize);
|
serverSideEncryption, sourceObjSize, destObjPrevSize);
|
||||||
});
|
});
|
||||||
},
|
},
|
||||||
], (err, destBucketMD, storeMetadataParams, serverSideEncryption,
|
], (err, res, destBucketMD, storeMetadataParams, serverSideEncryption,
|
||||||
sourceObjSize, destObjPrevSize) => {
|
sourceObjSize, destObjPrevSize) => {
|
||||||
const corsHeaders = collectCorsHeaders(request.headers.origin,
|
const corsHeaders = collectCorsHeaders(request.headers.origin,
|
||||||
request.method, destBucketMD);
|
request.method, destBucketMD);
|
||||||
|
@ -338,6 +360,13 @@ function objectCopy(authInfo, request, sourceBucket,
|
||||||
serverSideEncryption.masterKeyId;
|
serverSideEncryption.masterKeyId;
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
if (sourceVersionId) {
|
||||||
|
additionalHeaders['x-amz-copy-source-version-id'] =
|
||||||
|
VID.encrypt(sourceVersionId);
|
||||||
|
}
|
||||||
|
if (res && res.versionId) {
|
||||||
|
additionalHeaders['x-amz-version-id'] = VID.encrypt(res.versionId);
|
||||||
|
}
|
||||||
pushMetric('copyObject', log, {
|
pushMetric('copyObject', log, {
|
||||||
authInfo,
|
authInfo,
|
||||||
bucket: destBucketName,
|
bucket: destBucketName,
|
||||||
|
|
|
@ -1,10 +1,14 @@
|
||||||
import { errors } from 'arsenal';
|
import { errors, versioning } from 'arsenal';
|
||||||
|
import async from 'async';
|
||||||
|
|
||||||
import collectCorsHeaders from '../utilities/collectCorsHeaders';
|
import collectCorsHeaders from '../utilities/collectCorsHeaders';
|
||||||
import services from '../services';
|
import services from '../services';
|
||||||
import validateHeaders from '../utilities/validateHeaders';
|
import validateHeaders from '../utilities/validateHeaders';
|
||||||
import { pushMetric } from '../utapi/utilities';
|
import { pushMetric } from '../utapi/utilities';
|
||||||
|
import { cleanUpBucket } from './apiUtils/bucket/bucketCreation';
|
||||||
|
import { createAndStoreObject } from './objectPut';
|
||||||
|
|
||||||
|
const VID = versioning.VersionID;
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* objectDelete - DELETE an object from a bucket
|
* objectDelete - DELETE an object from a bucket
|
||||||
|
@ -24,48 +28,124 @@ export default function objectDelete(authInfo, request, log, cb) {
|
||||||
}
|
}
|
||||||
const bucketName = request.bucketName;
|
const bucketName = request.bucketName;
|
||||||
const objectKey = request.objectKey;
|
const objectKey = request.objectKey;
|
||||||
|
let reqVersionId = request.query ? request.query.versionId : undefined;
|
||||||
|
|
||||||
|
if (reqVersionId && reqVersionId !== 'null') {
|
||||||
|
try {
|
||||||
|
reqVersionId = VID.decrypt(reqVersionId);
|
||||||
|
} catch (exception) { // eslint-disable-line
|
||||||
|
return cb(errors.InvalidArgument.customizeDescription(
|
||||||
|
'Invalid version id specified'), null);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
const valParams = {
|
const valParams = {
|
||||||
authInfo,
|
authInfo,
|
||||||
bucketName,
|
bucketName,
|
||||||
objectKey,
|
objectKey,
|
||||||
|
versionId: reqVersionId || undefined,
|
||||||
requestType: 'objectDelete',
|
requestType: 'objectDelete',
|
||||||
log,
|
log,
|
||||||
};
|
};
|
||||||
return services.metadataValidateAuthorization(valParams,
|
|
||||||
(err, bucket, objMD) => {
|
const canonicalID = authInfo.getCanonicalID();
|
||||||
const corsHeaders = collectCorsHeaders(request.headers.origin,
|
let bucketMD = undefined;
|
||||||
request.method, bucket);
|
let objectMD = undefined;
|
||||||
if (err) {
|
let corsHeaders = undefined;
|
||||||
log.debug('error processing request', {
|
let removeDeleteMarker = false;
|
||||||
error: err,
|
let deleteOptions = undefined;
|
||||||
method: 'metadataValidateAuthorization',
|
return async.waterfall([
|
||||||
});
|
callback => services.metadataValidateAuthorization(valParams, callback),
|
||||||
return cb(err, corsHeaders);
|
(bucket, objMD, callback) => {
|
||||||
}
|
corsHeaders = collectCorsHeaders(request.headers.origin,
|
||||||
|
request.method, bucket);
|
||||||
|
bucketMD = bucket;
|
||||||
|
objectMD = objMD;
|
||||||
|
const versioning = bucketMD.getVersioningConfiguration();
|
||||||
|
// TODO adhere to the behaviour of AWS: create a delete marker
|
||||||
|
// even if the deleting object does not exist
|
||||||
if (!objMD) {
|
if (!objMD) {
|
||||||
return cb(errors.NoSuchKey, corsHeaders);
|
if (versioning) {
|
||||||
}
|
if (reqVersionId) {
|
||||||
const headerValResult = validateHeaders(objMD, request.headers);
|
corsHeaders['x-amz-version-id'] =
|
||||||
if (headerValResult.error) {
|
VID.encrypt(reqVersionId);
|
||||||
return cb(headerValResult.error, corsHeaders);
|
return cb(null, corsHeaders, null);
|
||||||
}
|
|
||||||
if (objMD['content-length']) {
|
|
||||||
log.end().addDefaultFields({
|
|
||||||
contentLength: objMD['content-length'],
|
|
||||||
});
|
|
||||||
}
|
|
||||||
return services.deleteObject(bucketName, objMD, objectKey, log,
|
|
||||||
err => {
|
|
||||||
if (err) {
|
|
||||||
return cb(err, corsHeaders);
|
|
||||||
}
|
}
|
||||||
pushMetric('deleteObject', log, {
|
} else {
|
||||||
authInfo,
|
return callback(errors.NoSuchKey);
|
||||||
bucket: bucketName,
|
}
|
||||||
byteLength: objMD['content-length'],
|
}
|
||||||
numberOfObjects: 1,
|
if (objMD) {
|
||||||
|
const headerValResult = validateHeaders(objMD, request.headers);
|
||||||
|
if (headerValResult.error) {
|
||||||
|
return callback(headerValResult.error);
|
||||||
|
}
|
||||||
|
if (objMD['content-length']) {
|
||||||
|
log.end().addDefaultFields({
|
||||||
|
contentLength: objMD['content-length'],
|
||||||
});
|
});
|
||||||
return cb(null, corsHeaders);
|
}
|
||||||
});
|
}
|
||||||
});
|
return callback();
|
||||||
|
},
|
||||||
|
callback => services.preprocessingVersioningDelete(bucketName,
|
||||||
|
bucketMD, objectKey, objectMD, reqVersionId, log, callback),
|
||||||
|
(options, callback) => {
|
||||||
|
if (options && options.deleteData) {
|
||||||
|
// delete object
|
||||||
|
deleteOptions = options;
|
||||||
|
return callback(null, options);
|
||||||
|
}
|
||||||
|
// putting a new delete marker
|
||||||
|
if (bucketMD.hasDeletedFlag() &&
|
||||||
|
canonicalID !== bucketMD.getOwner()) {
|
||||||
|
log.trace('deleted flag on bucket and request ' +
|
||||||
|
'from non-owner account');
|
||||||
|
return callback(errors.NoSuchBucket);
|
||||||
|
}
|
||||||
|
if (bucketMD.hasTransientFlag() || bucketMD.hasDeletedFlag()) {
|
||||||
|
return cleanUpBucket(bucketMD, canonicalID,
|
||||||
|
log, err => callback(err, null));
|
||||||
|
}
|
||||||
|
return callback(null, null);
|
||||||
|
},
|
||||||
|
(options, callback) => {
|
||||||
|
if (options && options.deleteData) {
|
||||||
|
if (objectMD.isDeleteMarker) {
|
||||||
|
removeDeleteMarker = true;
|
||||||
|
}
|
||||||
|
return services.deleteObject(bucketName, objectMD, objectKey,
|
||||||
|
options, log, callback);
|
||||||
|
}
|
||||||
|
request.isDeleteMarker = true; // eslint-disable-line
|
||||||
|
return createAndStoreObject(bucketName, bucketMD,
|
||||||
|
objectKey, objectMD, authInfo, canonicalID, null, request,
|
||||||
|
null, log, callback);
|
||||||
|
},
|
||||||
|
], (err, res) => {
|
||||||
|
if (err) {
|
||||||
|
log.debug('error processing request', { error: err,
|
||||||
|
method: 'metadataValidateAuthorization' });
|
||||||
|
} else if (deleteOptions === undefined) {
|
||||||
|
// TODO metric for delete marker
|
||||||
|
if (res.versionId) {
|
||||||
|
corsHeaders['x-amz-delete-marker'] = true;
|
||||||
|
corsHeaders['x-amz-version-id'] = res.versionId === 'null' ?
|
||||||
|
res.versionId : VID.encrypt(res.versionId);
|
||||||
|
}
|
||||||
|
pushMetric('putObject', log, { authInfo, bucket: bucketName,
|
||||||
|
newByteLength: 0, oldByteLength: 0 });
|
||||||
|
} else {
|
||||||
|
if (reqVersionId) {
|
||||||
|
corsHeaders['x-amz-version-id'] = reqVersionId === 'null' ?
|
||||||
|
reqVersionId : VID.encrypt(reqVersionId);
|
||||||
|
if (removeDeleteMarker) {
|
||||||
|
corsHeaders['x-amz-delete-marker'] = true;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
pushMetric('deleteObject', log, { authInfo, bucket: bucketName,
|
||||||
|
byteLength: objectMD['content-length'], numberOfObjects: 1 });
|
||||||
|
}
|
||||||
|
return cb(err, corsHeaders);
|
||||||
|
});
|
||||||
}
|
}
|
||||||
|
|
|
@ -1,4 +1,5 @@
|
||||||
import { errors } from 'arsenal';
|
import { errors, versioning } from 'arsenal';
|
||||||
|
import async from 'async';
|
||||||
|
|
||||||
import { parseRange } from './apiUtils/object/parseRange';
|
import { parseRange } from './apiUtils/object/parseRange';
|
||||||
import collectCorsHeaders from '../utilities/collectCorsHeaders';
|
import collectCorsHeaders from '../utilities/collectCorsHeaders';
|
||||||
|
@ -7,6 +8,8 @@ import services from '../services';
|
||||||
import validateHeaders from '../utilities/validateHeaders';
|
import validateHeaders from '../utilities/validateHeaders';
|
||||||
import { pushMetric } from '../utapi/utilities';
|
import { pushMetric } from '../utapi/utilities';
|
||||||
|
|
||||||
|
const VID = versioning.VersionID;
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* GET Object - Get an object
|
* GET Object - Get an object
|
||||||
* @param {AuthInfo} authInfo - Instance of AuthInfo class with requester's info
|
* @param {AuthInfo} authInfo - Instance of AuthInfo class with requester's info
|
||||||
|
@ -20,88 +23,150 @@ function objectGet(authInfo, request, log, callback) {
|
||||||
log.debug('processing request', { method: 'objectGet' });
|
log.debug('processing request', { method: 'objectGet' });
|
||||||
const bucketName = request.bucketName;
|
const bucketName = request.bucketName;
|
||||||
const objectKey = request.objectKey;
|
const objectKey = request.objectKey;
|
||||||
|
let versionId = request.query ? request.query.versionId : undefined;
|
||||||
|
versionId = versionId || undefined; // to smooth out versionId ''
|
||||||
|
|
||||||
|
if (versionId && versionId !== 'null') {
|
||||||
|
try {
|
||||||
|
versionId = VID.decrypt(versionId);
|
||||||
|
} catch (exception) { // eslint-disable-line
|
||||||
|
return callback(errors.InvalidArgument
|
||||||
|
.customizeDescription('Invalid version id specified'), null);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
const mdValParams = {
|
const mdValParams = {
|
||||||
authInfo,
|
authInfo,
|
||||||
bucketName,
|
bucketName,
|
||||||
objectKey,
|
objectKey,
|
||||||
|
versionId: versionId === 'null' ? undefined : versionId,
|
||||||
requestType: 'objectGet',
|
requestType: 'objectGet',
|
||||||
log,
|
log,
|
||||||
};
|
};
|
||||||
|
|
||||||
services.metadataValidateAuthorization(mdValParams, (err, bucket,
|
return async.waterfall([
|
||||||
objMD) => {
|
next => services.metadataValidateAuthorization(mdValParams,
|
||||||
const corsHeaders = collectCorsHeaders(request.headers.origin,
|
(err, bucket, objMD) => {
|
||||||
request.method, bucket);
|
const corsHeaders = collectCorsHeaders(request.headers.origin,
|
||||||
if (err) {
|
request.method, bucket);
|
||||||
log.debug('error processing request', { error: err });
|
if (err) {
|
||||||
return callback(err, null, corsHeaders);
|
log.debug('error processing request', { error: err });
|
||||||
}
|
return next(err, null, corsHeaders);
|
||||||
if (!objMD) {
|
}
|
||||||
return callback(errors.NoSuchKey, null, corsHeaders);
|
if (!objMD) {
|
||||||
}
|
return next(errors.NoSuchKey, null, corsHeaders);
|
||||||
const headerValResult = validateHeaders(objMD, request.headers);
|
}
|
||||||
if (headerValResult.error) {
|
if (versionId === undefined) {
|
||||||
return callback(headerValResult.error, null, corsHeaders);
|
return next(null, bucket, objMD);
|
||||||
}
|
}
|
||||||
const responseMetaHeaders = collectResponseHeaders(objMD, corsHeaders);
|
if (versionId !== 'null') {
|
||||||
// 0 bytes file
|
return next(null, bucket, objMD);
|
||||||
if (objMD.location === null) {
|
}
|
||||||
|
if (objMD.isNull || (objMD && !objMD.versionId)) {
|
||||||
|
return next(null, bucket, objMD);
|
||||||
|
}
|
||||||
|
if (objMD.nullVersionId === undefined) {
|
||||||
|
return next(errors.NoSuchVersion, null, corsHeaders);
|
||||||
|
}
|
||||||
|
mdValParams.versionId = objMD.nullVersionId;
|
||||||
|
return services.metadataValidateAuthorization(mdValParams,
|
||||||
|
(err, bucket, objMD) => {
|
||||||
|
if (err) {
|
||||||
|
return next(err, null, corsHeaders);
|
||||||
|
}
|
||||||
|
if (!objMD) {
|
||||||
|
return next(errors.NoSuchKey, null, corsHeaders);
|
||||||
|
}
|
||||||
|
return next(null, bucket, objMD);
|
||||||
|
});
|
||||||
|
}),
|
||||||
|
(bucket, objMD, next) => {
|
||||||
|
const corsHeaders = collectCorsHeaders(request.headers.origin,
|
||||||
|
request.method, bucket);
|
||||||
|
const headerValResult = validateHeaders(objMD, request.headers);
|
||||||
|
if (headerValResult.error) {
|
||||||
|
return next(headerValResult.error, null, corsHeaders);
|
||||||
|
}
|
||||||
|
const responseMetaHeaders = collectResponseHeaders(objMD,
|
||||||
|
corsHeaders);
|
||||||
|
// TODO: can probably extract this in a utility function, similar
|
||||||
|
// logic is used for get ACL
|
||||||
|
if (bucket.getVersioningConfiguration()) {
|
||||||
|
if (objMD.isNull || (objMD && !objMD.versionId)) {
|
||||||
|
responseMetaHeaders['x-amz-version-id'] = 'null';
|
||||||
|
} else if (objMD.versionId) {
|
||||||
|
responseMetaHeaders['x-amz-version-id'] =
|
||||||
|
VID.encrypt(objMD.versionId);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
if (objMD.isDeleteMarker) {
|
||||||
|
// TODO check if versionId should also be included
|
||||||
|
responseMetaHeaders['x-amz-delete-marker'] = true;
|
||||||
|
if (versionId) {
|
||||||
|
return next(errors.MethodNotAllowed, null,
|
||||||
|
responseMetaHeaders);
|
||||||
|
}
|
||||||
|
return next(errors.NoSuchKey, null, responseMetaHeaders);
|
||||||
|
}
|
||||||
|
// 0 bytes file
|
||||||
|
if (objMD.location === null) {
|
||||||
|
if (request.headers.range) {
|
||||||
|
return next(errors.InvalidRange, null, corsHeaders);
|
||||||
|
}
|
||||||
|
pushMetric('getObject', log, {
|
||||||
|
authInfo,
|
||||||
|
bucket: bucketName,
|
||||||
|
newByteLength: 0,
|
||||||
|
});
|
||||||
|
return next(null, null, responseMetaHeaders);
|
||||||
|
}
|
||||||
|
let range;
|
||||||
|
let maxContentLength;
|
||||||
if (request.headers.range) {
|
if (request.headers.range) {
|
||||||
return callback(errors.InvalidRange, null, corsHeaders);
|
maxContentLength =
|
||||||
|
parseInt(responseMetaHeaders['Content-Length'], 10);
|
||||||
|
responseMetaHeaders['Accept-Ranges'] = 'bytes';
|
||||||
|
const parseRangeRes = parseRange(request.headers.range,
|
||||||
|
maxContentLength);
|
||||||
|
range = parseRangeRes.range;
|
||||||
|
const error = parseRangeRes.error;
|
||||||
|
if (error) {
|
||||||
|
return next(error, null, corsHeaders);
|
||||||
|
}
|
||||||
|
if (range) {
|
||||||
|
// End of range should be included so + 1
|
||||||
|
responseMetaHeaders['Content-Length'] =
|
||||||
|
Math.min(maxContentLength - range[0],
|
||||||
|
range[1] - range[0] + 1);
|
||||||
|
responseMetaHeaders['Content-Range'] = `bytes ${range[0]}-`
|
||||||
|
+ `${Math.min(maxContentLength - 1, range[1])}` +
|
||||||
|
`/${maxContentLength}`;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
// To provide for backwards compatibility before md-model-version 2,
|
||||||
|
// need to handle cases where objMD.location is just a string
|
||||||
|
const dataLocator = Array.isArray(objMD.location) ?
|
||||||
|
objMD.location : [{ key: objMD.location }];
|
||||||
|
// If have a data model before version 2, cannot support get range
|
||||||
|
// for objects with multiple parts
|
||||||
|
if (range && dataLocator.length > 1 &&
|
||||||
|
dataLocator[0].start === undefined) {
|
||||||
|
return next(errors.NotImplemented, null, corsHeaders);
|
||||||
|
}
|
||||||
|
if (objMD['x-amz-server-side-encryption']) {
|
||||||
|
for (let i = 0; i < dataLocator.length; i++) {
|
||||||
|
dataLocator[i].masterKeyId =
|
||||||
|
objMD['x-amz-server-side-encryption-aws-kms-key-id'];
|
||||||
|
dataLocator[i].algorithm =
|
||||||
|
objMD['x-amz-server-side-encryption'];
|
||||||
|
}
|
||||||
}
|
}
|
||||||
pushMetric('getObject', log, {
|
pushMetric('getObject', log, {
|
||||||
authInfo,
|
authInfo,
|
||||||
bucket: bucketName,
|
bucket: bucketName,
|
||||||
newByteLength: 0,
|
newByteLength: responseMetaHeaders['Content-Length'],
|
||||||
});
|
});
|
||||||
return callback(null, null, responseMetaHeaders);
|
return next(null, dataLocator, responseMetaHeaders, range);
|
||||||
}
|
},
|
||||||
let range;
|
], callback);
|
||||||
let maxContentLength;
|
|
||||||
if (request.headers.range) {
|
|
||||||
maxContentLength =
|
|
||||||
parseInt(responseMetaHeaders['Content-Length'], 10);
|
|
||||||
responseMetaHeaders['Accept-Ranges'] = 'bytes';
|
|
||||||
const parseRangeRes = parseRange(request.headers.range,
|
|
||||||
maxContentLength);
|
|
||||||
range = parseRangeRes.range;
|
|
||||||
const error = parseRangeRes.error;
|
|
||||||
if (error) {
|
|
||||||
return callback(error, null, corsHeaders);
|
|
||||||
}
|
|
||||||
if (range) {
|
|
||||||
// End of range should be included so + 1
|
|
||||||
responseMetaHeaders['Content-Length'] =
|
|
||||||
Math.min(maxContentLength - range[0],
|
|
||||||
range[1] - range[0] + 1);
|
|
||||||
responseMetaHeaders['Content-Range'] = `bytes ${range[0]}-`
|
|
||||||
+ `${Math.min(maxContentLength - 1, range[1])}` +
|
|
||||||
`/${maxContentLength}`;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
// To provide for backwards compatibility before md-model-version 2,
|
|
||||||
// need to handle cases where objMD.location is just a string
|
|
||||||
const dataLocator = Array.isArray(objMD.location) ?
|
|
||||||
objMD.location : [{ key: objMD.location }];
|
|
||||||
// If have a data model before version 2, cannot support get range
|
|
||||||
// for objects with multiple parts
|
|
||||||
if (range && dataLocator.length > 1 &&
|
|
||||||
dataLocator[0].start === undefined) {
|
|
||||||
return callback(errors.NotImplemented, null, corsHeaders);
|
|
||||||
}
|
|
||||||
if (objMD['x-amz-server-side-encryption']) {
|
|
||||||
for (let i = 0; i < dataLocator.length; i++) {
|
|
||||||
dataLocator[i].masterKeyId =
|
|
||||||
objMD['x-amz-server-side-encryption-aws-kms-key-id'];
|
|
||||||
dataLocator[i].algorithm =
|
|
||||||
objMD['x-amz-server-side-encryption'];
|
|
||||||
}
|
|
||||||
}
|
|
||||||
pushMetric('getObject', log, {
|
|
||||||
authInfo,
|
|
||||||
bucket: bucketName,
|
|
||||||
newByteLength: responseMetaHeaders['Content-Length'],
|
|
||||||
});
|
|
||||||
return callback(null, dataLocator, responseMetaHeaders, range);
|
|
||||||
});
|
|
||||||
}
|
}
|
||||||
|
|
|
@ -1,4 +1,5 @@
|
||||||
import { errors } from 'arsenal';
|
import { errors, versioning } from 'arsenal';
|
||||||
|
import async from 'async';
|
||||||
|
|
||||||
import aclUtils from '../utilities/aclUtils';
|
import aclUtils from '../utilities/aclUtils';
|
||||||
import collectCorsHeaders from '../utilities/collectCorsHeaders';
|
import collectCorsHeaders from '../utilities/collectCorsHeaders';
|
||||||
|
@ -7,6 +8,8 @@ import { pushMetric } from '../utapi/utilities';
|
||||||
import services from '../services';
|
import services from '../services';
|
||||||
import vault from '../auth/vault';
|
import vault from '../auth/vault';
|
||||||
|
|
||||||
|
const VID = versioning.VersionID;
|
||||||
|
|
||||||
// Sample XML response:
|
// Sample XML response:
|
||||||
/*
|
/*
|
||||||
<AccessControlPolicy>
|
<AccessControlPolicy>
|
||||||
|
@ -41,10 +44,23 @@ export default function objectGetACL(authInfo, request, log, callback) {
|
||||||
log.debug('processing request', { method: 'objectGetACL' });
|
log.debug('processing request', { method: 'objectGetACL' });
|
||||||
const bucketName = request.bucketName;
|
const bucketName = request.bucketName;
|
||||||
const objectKey = request.objectKey;
|
const objectKey = request.objectKey;
|
||||||
|
let versionId = request.query ? request.query.versionId : undefined;
|
||||||
|
versionId = versionId || undefined; // to smooth out versionId ''
|
||||||
|
|
||||||
|
if (versionId && versionId !== 'null') {
|
||||||
|
try {
|
||||||
|
versionId = VID.decrypt(versionId);
|
||||||
|
} catch (exception) { // eslint-disable-line
|
||||||
|
return callback(errors.InvalidArgument
|
||||||
|
.customizeDescription('Invalid version id specified'), null);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
const metadataValParams = {
|
const metadataValParams = {
|
||||||
authInfo,
|
authInfo,
|
||||||
bucketName,
|
bucketName,
|
||||||
objectKey,
|
objectKey,
|
||||||
|
versionId: versionId === 'null' ? undefined : versionId,
|
||||||
requestType: 'objectGetACL',
|
requestType: 'objectGetACL',
|
||||||
log,
|
log,
|
||||||
};
|
};
|
||||||
|
@ -60,19 +76,59 @@ export default function objectGetACL(authInfo, request, log, callback) {
|
||||||
constants.logId,
|
constants.logId,
|
||||||
];
|
];
|
||||||
|
|
||||||
services.metadataValidateAuthorization(metadataValParams,
|
return async.waterfall([
|
||||||
|
callback => services.metadataValidateAuthorization(metadataValParams,
|
||||||
(err, bucket, objectMD) => {
|
(err, bucket, objectMD) => {
|
||||||
const corsHeaders = collectCorsHeaders(request.headers.origin,
|
|
||||||
request.method, bucket);
|
|
||||||
if (err) {
|
if (err) {
|
||||||
log.trace('request authorization failed',
|
log.trace('request authorization failed',
|
||||||
{ method: 'objectGetACL', error: err });
|
{ method: 'objectGetACL', error: err });
|
||||||
return callback(err, null, corsHeaders);
|
return callback(err, bucket);
|
||||||
}
|
}
|
||||||
if (!objectMD) {
|
if (!objectMD) {
|
||||||
|
const err = versionId ? errors.NoSuchVersion :
|
||||||
|
errors.NoSuchKey;
|
||||||
log.trace('error processing request',
|
log.trace('error processing request',
|
||||||
{ method: 'objectGetACL', error: err });
|
{ method: 'objectGetACL', error: err });
|
||||||
return callback(errors.NoSuchKey, null, corsHeaders);
|
return callback(err, bucket);
|
||||||
|
}
|
||||||
|
if (versionId === undefined) {
|
||||||
|
return callback(null, bucket, objectMD);
|
||||||
|
}
|
||||||
|
if (versionId !== 'null') {
|
||||||
|
return callback(null, bucket, objectMD);
|
||||||
|
}
|
||||||
|
if (objectMD.isNull || (objectMD && !objectMD.versionId)) {
|
||||||
|
return callback(null, bucket, objectMD);
|
||||||
|
}
|
||||||
|
if (!objectMD.nullVersionId) {
|
||||||
|
return callback(errors.NoSuchVersion, bucket);
|
||||||
|
}
|
||||||
|
metadataValParams.versionId = objectMD.nullVersionId;
|
||||||
|
return services.metadataValidateAuthorization(
|
||||||
|
metadataValParams, (err, bucket, objectMD) => {
|
||||||
|
if (err) {
|
||||||
|
log.trace('request authorization failed',
|
||||||
|
{ method: 'objectGetACL', error: err });
|
||||||
|
return callback(err, bucket);
|
||||||
|
}
|
||||||
|
if (!objectMD) {
|
||||||
|
log.trace('error processing request',
|
||||||
|
{ method: 'objectGetACL', error: err });
|
||||||
|
return callback(errors.NoSuchVersion, bucket);
|
||||||
|
}
|
||||||
|
return callback(null, bucket, objectMD);
|
||||||
|
});
|
||||||
|
}),
|
||||||
|
(bucket, objectMD, callback) => {
|
||||||
|
// if versioning is enabled or suspended, return version id in
|
||||||
|
// response headers
|
||||||
|
let resVersionId;
|
||||||
|
if (bucket.getVersioningConfiguration()) {
|
||||||
|
if (objectMD.isNull || (objectMD && !objectMD.versionId)) {
|
||||||
|
resVersionId = 'null';
|
||||||
|
} else {
|
||||||
|
resVersionId = VID.encrypt(objectMD.versionId);
|
||||||
|
}
|
||||||
}
|
}
|
||||||
const objectACL = objectMD.acl;
|
const objectACL = objectMD.acl;
|
||||||
const allSpecificGrants = [].concat(
|
const allSpecificGrants = [].concat(
|
||||||
|
@ -109,7 +165,7 @@ export default function objectGetACL(authInfo, request, log, callback) {
|
||||||
authInfo,
|
authInfo,
|
||||||
bucket: bucketName,
|
bucket: bucketName,
|
||||||
});
|
});
|
||||||
return callback(null, xml, corsHeaders);
|
return callback(null, bucket, xml, resVersionId);
|
||||||
}
|
}
|
||||||
/**
|
/**
|
||||||
* Build array of all canonicalIDs used in ACLs so duplicates
|
* Build array of all canonicalIDs used in ACLs so duplicates
|
||||||
|
@ -143,7 +199,7 @@ export default function objectGetACL(authInfo, request, log, callback) {
|
||||||
authInfo,
|
authInfo,
|
||||||
bucket: bucketName,
|
bucket: bucketName,
|
||||||
});
|
});
|
||||||
return callback(null, xml, corsHeaders);
|
return callback(null, bucket, xml, resVersionId);
|
||||||
}
|
}
|
||||||
/**
|
/**
|
||||||
* If acl's set by account canonicalID,
|
* If acl's set by account canonicalID,
|
||||||
|
@ -154,7 +210,7 @@ export default function objectGetACL(authInfo, request, log, callback) {
|
||||||
if (err) {
|
if (err) {
|
||||||
log.trace('error processing request',
|
log.trace('error processing request',
|
||||||
{ method: 'objectGetACL', error: err });
|
{ method: 'objectGetACL', error: err });
|
||||||
return callback(err, null, corsHeaders);
|
return callback(err, bucket);
|
||||||
}
|
}
|
||||||
const individualGrants = canonicalIDs.map(canonicalID => {
|
const individualGrants = canonicalIDs.map(canonicalID => {
|
||||||
/**
|
/**
|
||||||
|
@ -180,11 +236,20 @@ export default function objectGetACL(authInfo, request, log, callback) {
|
||||||
.concat(individualGrants).concat(uriGrantInfo);
|
.concat(individualGrants).concat(uriGrantInfo);
|
||||||
// parse info about accounts and owner info to convert to xml
|
// parse info about accounts and owner info to convert to xml
|
||||||
const xml = aclUtils.convertToXml(grantInfo);
|
const xml = aclUtils.convertToXml(grantInfo);
|
||||||
pushMetric('getObjectAcl', log, {
|
return callback(null, bucket, xml, resVersionId);
|
||||||
authInfo,
|
|
||||||
bucket: bucketName,
|
|
||||||
});
|
|
||||||
return callback(null, xml, corsHeaders);
|
|
||||||
});
|
});
|
||||||
|
},
|
||||||
|
], (err, bucket, xml, resVersionId) => {
|
||||||
|
const resHeaders = collectCorsHeaders(request.headers.origin,
|
||||||
|
request.method, bucket);
|
||||||
|
if (err) {
|
||||||
|
return callback(err, null, resHeaders);
|
||||||
|
}
|
||||||
|
pushMetric('getObjectAcl', log, {
|
||||||
|
authInfo,
|
||||||
|
bucket: bucketName,
|
||||||
});
|
});
|
||||||
|
resHeaders['x-amz-version-id'] = resVersionId;
|
||||||
|
return callback(null, xml, resHeaders);
|
||||||
|
});
|
||||||
}
|
}
|
||||||
|
|
|
@ -1,4 +1,5 @@
|
||||||
import { errors } from 'arsenal';
|
import { errors, versioning } from 'arsenal';
|
||||||
|
import async from 'async';
|
||||||
|
|
||||||
import collectCorsHeaders from '../utilities/collectCorsHeaders';
|
import collectCorsHeaders from '../utilities/collectCorsHeaders';
|
||||||
import collectResponseHeaders from '../utilities/collectResponseHeaders';
|
import collectResponseHeaders from '../utilities/collectResponseHeaders';
|
||||||
|
@ -6,6 +7,8 @@ import services from '../services';
|
||||||
import validateHeaders from '../utilities/validateHeaders';
|
import validateHeaders from '../utilities/validateHeaders';
|
||||||
import { pushMetric } from '../utapi/utilities';
|
import { pushMetric } from '../utapi/utilities';
|
||||||
|
|
||||||
|
const VID = versioning.VersionID;
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* HEAD Object - Same as Get Object but only respond with headers
|
* HEAD Object - Same as Get Object but only respond with headers
|
||||||
*(no actual body)
|
*(no actual body)
|
||||||
|
@ -20,15 +23,29 @@ export default function objectHead(authInfo, request, log, callback) {
|
||||||
log.debug('processing request', { method: 'objectHead' });
|
log.debug('processing request', { method: 'objectHead' });
|
||||||
const bucketName = request.bucketName;
|
const bucketName = request.bucketName;
|
||||||
const objectKey = request.objectKey;
|
const objectKey = request.objectKey;
|
||||||
const metadataValParams = {
|
let versionId = request.query ? request.query.versionId : undefined;
|
||||||
|
versionId = versionId || undefined; // to smooth out versionId ''
|
||||||
|
|
||||||
|
if (versionId && versionId !== 'null') {
|
||||||
|
try {
|
||||||
|
versionId = VID.decrypt(versionId);
|
||||||
|
} catch (exception) { // eslint-disable-line
|
||||||
|
return callback(errors.InvalidArgument
|
||||||
|
.customizeDescription('Invalid version id specified'), null);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
const mdValParams = {
|
||||||
authInfo,
|
authInfo,
|
||||||
bucketName,
|
bucketName,
|
||||||
objectKey,
|
objectKey,
|
||||||
|
versionId: versionId === 'null' ? undefined : versionId,
|
||||||
requestType: 'objectHead',
|
requestType: 'objectHead',
|
||||||
log,
|
log,
|
||||||
};
|
};
|
||||||
|
|
||||||
return services.metadataValidateAuthorization(metadataValParams,
|
return async.waterfall([
|
||||||
|
next => services.metadataValidateAuthorization(mdValParams,
|
||||||
(err, bucket, objMD) => {
|
(err, bucket, objMD) => {
|
||||||
const corsHeaders = collectCorsHeaders(request.headers.origin,
|
const corsHeaders = collectCorsHeaders(request.headers.origin,
|
||||||
request.method, bucket);
|
request.method, bucket);
|
||||||
|
@ -37,21 +54,48 @@ export default function objectHead(authInfo, request, log, callback) {
|
||||||
error: err,
|
error: err,
|
||||||
method: 'metadataValidateAuthorization',
|
method: 'metadataValidateAuthorization',
|
||||||
});
|
});
|
||||||
return callback(err, corsHeaders);
|
return next(err, corsHeaders);
|
||||||
}
|
}
|
||||||
if (!objMD) {
|
if (!objMD) {
|
||||||
return callback(errors.NoSuchKey, corsHeaders);
|
return next(errors.NoSuchKey, corsHeaders);
|
||||||
}
|
}
|
||||||
|
if (versionId === undefined) {
|
||||||
|
return next(null, bucket, objMD);
|
||||||
|
}
|
||||||
|
if (versionId !== 'null') {
|
||||||
|
return next(null, bucket, objMD);
|
||||||
|
}
|
||||||
|
if (objMD.isNull) {
|
||||||
|
return next(null, bucket, objMD);
|
||||||
|
}
|
||||||
|
if (objMD.nullVersionId === undefined) {
|
||||||
|
return next(errors.NoSuchKey, corsHeaders);
|
||||||
|
}
|
||||||
|
mdValParams.versionId = objMD.nullVersionId;
|
||||||
|
return services.metadataValidateAuthorization(mdValParams,
|
||||||
|
(err, bucket, objMD) => {
|
||||||
|
if (err) {
|
||||||
|
return next(err, corsHeaders);
|
||||||
|
}
|
||||||
|
if (!objMD) {
|
||||||
|
return next(errors.NoSuchKey, corsHeaders);
|
||||||
|
}
|
||||||
|
return next(null, bucket, objMD);
|
||||||
|
});
|
||||||
|
}),
|
||||||
|
(bucket, objMD, next) => {
|
||||||
|
const corsHeaders = collectCorsHeaders(request.headers.origin,
|
||||||
|
request.method, bucket);
|
||||||
const headerValResult = validateHeaders(objMD, request.headers);
|
const headerValResult = validateHeaders(objMD, request.headers);
|
||||||
if (headerValResult.error) {
|
if (headerValResult.error) {
|
||||||
return callback(headerValResult.error, corsHeaders);
|
return next(headerValResult.error, corsHeaders);
|
||||||
}
|
}
|
||||||
const responseMetaHeaders = collectResponseHeaders(objMD,
|
const responseHeaders = collectResponseHeaders(objMD, corsHeaders);
|
||||||
corsHeaders);
|
if (versionId) {
|
||||||
pushMetric('headObject', log, {
|
responseHeaders['x-amz-version-id'] = VID.encrypt(versionId);
|
||||||
authInfo,
|
}
|
||||||
bucket: bucketName,
|
pushMetric('headObject', log, { authInfo, bucket: bucketName });
|
||||||
});
|
return next(null, responseHeaders);
|
||||||
return callback(null, responseMetaHeaders);
|
},
|
||||||
});
|
], callback);
|
||||||
}
|
}
|
||||||
|
|
|
@ -1,4 +1,5 @@
|
||||||
import { errors } from 'arsenal';
|
import { errors, versioning } from 'arsenal';
|
||||||
|
import async from 'async';
|
||||||
|
|
||||||
import data from '../data/wrapper';
|
import data from '../data/wrapper';
|
||||||
import services from '../services';
|
import services from '../services';
|
||||||
|
@ -12,23 +13,27 @@ import { logger } from '../utilities/logger';
|
||||||
import { pushMetric } from '../utapi/utilities';
|
import { pushMetric } from '../utapi/utilities';
|
||||||
import kms from '../kms/wrapper';
|
import kms from '../kms/wrapper';
|
||||||
import removeAWSChunked from './apiUtils/object/removeAWSChunked';
|
import removeAWSChunked from './apiUtils/object/removeAWSChunked';
|
||||||
|
import metadata from '../metadata/wrapper';
|
||||||
|
|
||||||
|
const VID = versioning.VersionID;
|
||||||
|
|
||||||
|
|
||||||
function _storeInMDandDeleteData(bucketName, dataGetInfo, cipherBundle,
|
function _storeInMDandDeleteData(bucketName, dataGetInfo, cipherBundle,
|
||||||
metadataStoreParams, dataToDelete, deleteLog, callback) {
|
metadataStoreParams, dataToDelete, deleteLog, callback) {
|
||||||
services.metadataStoreObject(bucketName, dataGetInfo,
|
services.metadataStoreObject(bucketName, dataGetInfo,
|
||||||
cipherBundle, metadataStoreParams, (err, contentMD5) => {
|
cipherBundle, metadataStoreParams, (err, res) => {
|
||||||
if (err) {
|
if (err) {
|
||||||
return callback(err);
|
return callback(err);
|
||||||
}
|
}
|
||||||
if (dataToDelete) {
|
if (dataToDelete) {
|
||||||
data.batchDelete(dataToDelete, deleteLog);
|
data.batchDelete(dataToDelete, deleteLog);
|
||||||
}
|
}
|
||||||
return callback(null, contentMD5);
|
return callback(null, res);
|
||||||
});
|
});
|
||||||
}
|
}
|
||||||
|
|
||||||
function _storeIt(bucketName, objectKey, objMD, authInfo, canonicalID,
|
function createAndStoreObject(bucketName, bucketMD, objectKey, objMD, authInfo,
|
||||||
cipherBundle, request, streamingV4Params, log, callback) {
|
canonicalID, cipherBundle, request, streamingV4Params, log, callback) {
|
||||||
const size = request.parsedContentLength;
|
const size = request.parsedContentLength;
|
||||||
|
|
||||||
const websiteRedirectHeader =
|
const websiteRedirectHeader =
|
||||||
|
@ -55,98 +60,85 @@ function _storeIt(bucketName, objectKey, objMD, authInfo, canonicalID,
|
||||||
headers['x-amz-acl'] = request.query['x-amz-acl'];
|
headers['x-amz-acl'] = request.query['x-amz-acl'];
|
||||||
}
|
}
|
||||||
const metadataStoreParams = {
|
const metadataStoreParams = {
|
||||||
objectKey,
|
objectKey, authInfo, metaHeaders, size,
|
||||||
authInfo,
|
|
||||||
metaHeaders,
|
|
||||||
size,
|
|
||||||
contentType: request.headers['content-type'],
|
contentType: request.headers['content-type'],
|
||||||
cacheControl: request.headers['cache-control'],
|
cacheControl: request.headers['cache-control'],
|
||||||
contentDisposition: request.headers['content-disposition'],
|
contentDisposition: request.headers['content-disposition'],
|
||||||
contentEncoding:
|
contentEncoding: removeAWSChunked(request.headers['content-encoding']),
|
||||||
removeAWSChunked(request.headers['content-encoding']),
|
expires: request.headers.expires, headers, log,
|
||||||
expires: request.headers.expires,
|
isDeleteMarker: request.isDeleteMarker,
|
||||||
headers,
|
|
||||||
log,
|
|
||||||
};
|
};
|
||||||
let dataToDelete;
|
let dataGetInfoArr = undefined;
|
||||||
|
let dataToDelete = undefined;
|
||||||
if (objMD && objMD.location) {
|
if (objMD && objMD.location) {
|
||||||
dataToDelete = Array.isArray(objMD.location) ?
|
dataToDelete = Array.isArray(objMD.location) ?
|
||||||
objMD.location : [objMD.location];
|
objMD.location : [objMD.location];
|
||||||
}
|
}
|
||||||
|
const reqVersionId = request.query ? request.query.versionId : undefined;
|
||||||
|
|
||||||
// null - new object
|
// null - new object
|
||||||
// 0 or > 0 - existing object with content-length 0 or greater than 0
|
// 0 or > 0 - existing object with content-length 0 or greater than 0
|
||||||
const prevContentLen = objMD && objMD['content-length'] !== undefined ?
|
const requestLogger =
|
||||||
objMD['content-length'] : null;
|
logger.newRequestLoggerFromSerializedUids(log.getSerializedUids());
|
||||||
if (size !== 0) {
|
return async.waterfall([
|
||||||
log.trace('storing object in data', {
|
callback => {
|
||||||
method: 'services.metadataValidateAuthorization',
|
if (size === 0) {
|
||||||
});
|
metadataStoreParams.contentMD5 = constants.emptyFileMd5;
|
||||||
return dataStore(objectKeyContext, cipherBundle, request, size,
|
return callback(null, null, null);
|
||||||
streamingV4Params, log, (err, dataGetInfo, calculatedHash) => {
|
}
|
||||||
if (err) {
|
return dataStore(objectKeyContext, cipherBundle, request, size,
|
||||||
log.trace('error from data', {
|
streamingV4Params, log, callback);
|
||||||
error: err,
|
},
|
||||||
method: 'dataStore',
|
(dataGetInfo, calculatedHash, callback) => {
|
||||||
});
|
if (dataGetInfo === null || dataGetInfo === undefined) {
|
||||||
return callback(err);
|
return callback(null, null);
|
||||||
}
|
}
|
||||||
// So that data retrieval information for MPU's and
|
// So that data retrieval information for MPU's and
|
||||||
// regular puts are stored in the same data structure,
|
// regular puts are stored in the same data structure,
|
||||||
// place the retrieval info here into a single element array
|
// place the retrieval info here into a single element array
|
||||||
const dataGetInfoArr = [{
|
const { key, dataStoreName } = dataGetInfo;
|
||||||
key: dataGetInfo.key,
|
const dataGetInfoArr = [{ key, size, start: 0, dataStoreName }];
|
||||||
size,
|
if (cipherBundle) {
|
||||||
start: 0,
|
dataGetInfoArr[0].cryptoScheme = cipherBundle.cryptoScheme;
|
||||||
dataStoreName: dataGetInfo.dataStoreName,
|
dataGetInfoArr[0].cipheredDataKey =
|
||||||
}];
|
cipherBundle.cipheredDataKey;
|
||||||
if (cipherBundle) {
|
}
|
||||||
dataGetInfoArr[0].cryptoScheme = cipherBundle.cryptoScheme;
|
metadataStoreParams.contentMD5 = calculatedHash;
|
||||||
dataGetInfoArr[0].cipheredDataKey =
|
return callback(null, dataGetInfoArr);
|
||||||
cipherBundle.cipheredDataKey;
|
},
|
||||||
}
|
(infoArr, callback) => {
|
||||||
metadataStoreParams.contentMD5 = calculatedHash;
|
dataGetInfoArr = infoArr;
|
||||||
return _storeInMDandDeleteData(
|
return services.versioningPreprocessing(bucketName, bucketMD,
|
||||||
bucketName, dataGetInfoArr, cipherBundle,
|
metadataStoreParams.objectKey, objMD, reqVersionId, log,
|
||||||
metadataStoreParams, dataToDelete,
|
callback);
|
||||||
logger.newRequestLoggerFromSerializedUids(
|
},
|
||||||
log.getSerializedUids()), (err, contentMD5) => {
|
(options, callback) => {
|
||||||
if (err) {
|
if (!options.deleteNullVersionData) {
|
||||||
return callback(err);
|
return callback(null, options);
|
||||||
}
|
}
|
||||||
pushMetric('putObject', log, {
|
const params = { versionId: options.nullVersionId };
|
||||||
authInfo,
|
return metadata.getObjectMD(bucketName, objectKey,
|
||||||
bucket: bucketName,
|
params, log, (err, nullObjMD) => {
|
||||||
newByteLength: size,
|
if (nullObjMD.location) {
|
||||||
oldByteLength: prevContentLen,
|
dataToDelete = Array.isArray(nullObjMD.location) ?
|
||||||
});
|
nullObjMD.location : [nullObjMD.location];
|
||||||
return callback(null, contentMD5);
|
}
|
||||||
});
|
return callback(null, options);
|
||||||
});
|
|
||||||
}
|
|
||||||
log.trace('content-length is 0 so only storing metadata', {
|
|
||||||
method: 'services.metadataValidateAuthorization',
|
|
||||||
});
|
|
||||||
metadataStoreParams.contentMD5 = constants.emptyFileMd5;
|
|
||||||
const dataGetInfo = null;
|
|
||||||
return _storeInMDandDeleteData(bucketName, dataGetInfo, cipherBundle,
|
|
||||||
metadataStoreParams, dataToDelete,
|
|
||||||
logger.newRequestLoggerFromSerializedUids(log
|
|
||||||
.getSerializedUids()), (err, contentMD5) => {
|
|
||||||
if (err) {
|
|
||||||
return callback(err);
|
|
||||||
}
|
|
||||||
pushMetric('putObject', log, {
|
|
||||||
authInfo,
|
|
||||||
bucket: bucketName,
|
|
||||||
newByteLength: size,
|
|
||||||
oldByteLength: prevContentLen,
|
|
||||||
});
|
});
|
||||||
return callback(null, contentMD5);
|
},
|
||||||
});
|
(options, callback) => {
|
||||||
|
metadataStoreParams.versionId = options.versionId;
|
||||||
|
metadataStoreParams.versioning = options.versioning;
|
||||||
|
metadataStoreParams.isNull = options.isNull;
|
||||||
|
metadataStoreParams.nullVersionId = options.nullVersionId;
|
||||||
|
return _storeInMDandDeleteData(bucketName, dataGetInfoArr,
|
||||||
|
cipherBundle, metadataStoreParams,
|
||||||
|
options.deleteData ? dataToDelete : undefined,
|
||||||
|
requestLogger, callback);
|
||||||
|
},
|
||||||
|
], callback);
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* PUT Object in the requested bucket. Steps include:
|
* PUT Object in the requested bucket. Steps include:
|
||||||
* validating metadata for authorization, bucket and object existence etc.
|
* validating metadata for authorization, bucket and object existence etc.
|
||||||
|
@ -166,7 +158,6 @@ function _storeIt(bucketName, objectKey, objMD, authInfo, canonicalID,
|
||||||
* @param {Function} callback - final callback to call with the result
|
* @param {Function} callback - final callback to call with the result
|
||||||
* @return {undefined}
|
* @return {undefined}
|
||||||
*/
|
*/
|
||||||
export default
|
|
||||||
function objectPut(authInfo, request, streamingV4Params, log, callback) {
|
function objectPut(authInfo, request, streamingV4Params, log, callback) {
|
||||||
log.debug('processing request', { method: 'objectPut' });
|
log.debug('processing request', { method: 'objectPut' });
|
||||||
if (!aclUtils.checkGrantHeaderValidity(request.headers)) {
|
if (!aclUtils.checkGrantHeaderValidity(request.headers)) {
|
||||||
|
@ -175,18 +166,13 @@ function objectPut(authInfo, request, streamingV4Params, log, callback) {
|
||||||
}
|
}
|
||||||
const bucketName = request.bucketName;
|
const bucketName = request.bucketName;
|
||||||
const objectKey = request.objectKey;
|
const objectKey = request.objectKey;
|
||||||
const valParams = {
|
const requestType = 'objectPut';
|
||||||
authInfo,
|
const valParams = { authInfo, bucketName, objectKey, requestType, log };
|
||||||
bucketName,
|
|
||||||
objectKey,
|
|
||||||
requestType: 'objectPut',
|
|
||||||
log,
|
|
||||||
};
|
|
||||||
const canonicalID = authInfo.getCanonicalID();
|
const canonicalID = authInfo.getCanonicalID();
|
||||||
log.trace('owner canonicalID to send to data', { canonicalID });
|
log.trace('owner canonicalID to send to data', { canonicalID });
|
||||||
|
|
||||||
return services.metadataValidateAuthorization(valParams, (err, bucket,
|
return services.metadataValidateAuthorization(valParams,
|
||||||
objMD) => {
|
(err, bucket, objMD) => {
|
||||||
const corsHeaders = collectCorsHeaders(request.headers.origin,
|
const corsHeaders = collectCorsHeaders(request.headers.origin,
|
||||||
request.method, bucket);
|
request.method, bucket);
|
||||||
if (err) {
|
if (err) {
|
||||||
|
@ -196,74 +182,50 @@ function objectPut(authInfo, request, streamingV4Params, log, callback) {
|
||||||
});
|
});
|
||||||
return callback(err, null, corsHeaders);
|
return callback(err, null, corsHeaders);
|
||||||
}
|
}
|
||||||
if (bucket.hasDeletedFlag() &&
|
if (bucket.hasDeletedFlag() && canonicalID !== bucket.getOwner()) {
|
||||||
canonicalID !== bucket.getOwner()) {
|
|
||||||
log.trace('deleted flag on bucket and request ' +
|
log.trace('deleted flag on bucket and request ' +
|
||||||
'from non-owner account');
|
'from non-owner account');
|
||||||
return callback(errors.NoSuchBucket);
|
return callback(errors.NoSuchBucket);
|
||||||
}
|
}
|
||||||
const serverSideEncryption = bucket.getServerSideEncryption();
|
return async.waterfall([
|
||||||
if (bucket.hasTransientFlag() ||
|
callback => {
|
||||||
bucket.hasDeletedFlag()) {
|
if (bucket.hasTransientFlag() || bucket.hasDeletedFlag()) {
|
||||||
log.trace('transient or deleted flag so cleaning up bucket');
|
return cleanUpBucket(bucket, canonicalID, log, callback);
|
||||||
return cleanUpBucket(bucket,
|
}
|
||||||
canonicalID, log, err => {
|
return callback();
|
||||||
if (err) {
|
},
|
||||||
log.debug('error cleaning up bucket with flag',
|
callback => {
|
||||||
{ error: err,
|
const serverSideEncryption = bucket.getServerSideEncryption();
|
||||||
transientFlag:
|
if (serverSideEncryption) {
|
||||||
bucket.hasTransientFlag(),
|
return kms.createCipherBundle(
|
||||||
deletedFlag:
|
serverSideEncryption, log, callback);
|
||||||
bucket.hasDeletedFlag(),
|
}
|
||||||
});
|
return callback(null, null);
|
||||||
// To avoid confusing user with error
|
},
|
||||||
// from cleaning up
|
(cipherBundle, callback) => createAndStoreObject(bucketName,
|
||||||
// bucket return InternalError
|
bucket, objectKey, objMD, authInfo, canonicalID, cipherBundle,
|
||||||
return callback(errors.InternalError, null,
|
request, streamingV4Params, log, callback),
|
||||||
corsHeaders);
|
], (err, res) => {
|
||||||
}
|
if (err) {
|
||||||
if (serverSideEncryption) {
|
return callback(err, null, corsHeaders);
|
||||||
return kms.createCipherBundle(
|
}
|
||||||
serverSideEncryption,
|
const newByteLength = request.parsedContentLength;
|
||||||
log, (err, cipherBundle) => {
|
const oldByteLength = objMD ? objMD['content-length'] : null;
|
||||||
if (err) {
|
pushMetric('putObject', log, { authInfo, bucket: bucketName,
|
||||||
return callback(errors.InternalError,
|
newByteLength, oldByteLength });
|
||||||
null, corsHeaders);
|
if (res) {
|
||||||
}
|
corsHeaders.ETag = `"${res.contentMD5}"`;
|
||||||
return _storeIt(bucketName, objectKey,
|
}
|
||||||
objMD, authInfo, canonicalID,
|
const vcfg = bucket.getVersioningConfiguration();
|
||||||
cipherBundle, request,
|
if (vcfg && vcfg.Status === 'Enabled') {
|
||||||
streamingV4Params, log,
|
if (res && res.versionId) {
|
||||||
(err, contentMD5) =>
|
corsHeaders['x-amz-version-id'] =
|
||||||
callback(err, contentMD5,
|
VID.encrypt(res.versionId);
|
||||||
corsHeaders));
|
}
|
||||||
});
|
}
|
||||||
}
|
return callback(null, res, corsHeaders);
|
||||||
return _storeIt(bucketName, objectKey, objMD,
|
});
|
||||||
authInfo, canonicalID, null, request,
|
|
||||||
streamingV4Params, log,
|
|
||||||
(err, contentMD5) =>
|
|
||||||
callback(err, contentMD5, corsHeaders));
|
|
||||||
});
|
|
||||||
}
|
|
||||||
if (serverSideEncryption) {
|
|
||||||
return kms.createCipherBundle(
|
|
||||||
serverSideEncryption,
|
|
||||||
log, (err, cipherBundle) => {
|
|
||||||
if (err) {
|
|
||||||
return callback(errors.InternalError, null,
|
|
||||||
corsHeaders);
|
|
||||||
}
|
|
||||||
return _storeIt(bucketName, objectKey, objMD,
|
|
||||||
authInfo, canonicalID, cipherBundle,
|
|
||||||
request, streamingV4Params, log,
|
|
||||||
(err, contentMD5) =>
|
|
||||||
callback(err, contentMD5, corsHeaders));
|
|
||||||
});
|
|
||||||
}
|
|
||||||
return _storeIt(bucketName, objectKey, objMD, authInfo, canonicalID,
|
|
||||||
null, request, streamingV4Params, log,
|
|
||||||
(err, contentMD5) =>
|
|
||||||
callback(err, contentMD5, corsHeaders));
|
|
||||||
});
|
});
|
||||||
}
|
}
|
||||||
|
|
||||||
|
module.exports = { createAndStoreObject, objectPut };
|
||||||
|
|
|
@ -1,4 +1,4 @@
|
||||||
import { errors } from 'arsenal';
|
import { errors, versioning } from 'arsenal';
|
||||||
import async from 'async';
|
import async from 'async';
|
||||||
|
|
||||||
import acl from '../metadata/acl';
|
import acl from '../metadata/acl';
|
||||||
|
@ -9,6 +9,8 @@ import constants from '../../constants';
|
||||||
import services from '../services';
|
import services from '../services';
|
||||||
import vault from '../auth/vault';
|
import vault from '../auth/vault';
|
||||||
|
|
||||||
|
const VID = versioning.VersionID;
|
||||||
|
|
||||||
/*
|
/*
|
||||||
Format of xml request:
|
Format of xml request:
|
||||||
|
|
||||||
|
@ -68,6 +70,17 @@ export default function objectPutACL(authInfo, request, log, cb) {
|
||||||
requestType: 'objectPutACL',
|
requestType: 'objectPutACL',
|
||||||
log,
|
log,
|
||||||
};
|
};
|
||||||
|
let reqVersionId = request.query ? request.query.versionId : undefined;
|
||||||
|
if (reqVersionId && reqVersionId !== 'null') {
|
||||||
|
try {
|
||||||
|
reqVersionId = VID.decrypt(reqVersionId);
|
||||||
|
} catch (exception) { // eslint-disable-line
|
||||||
|
return cb(errors.InvalidArgument
|
||||||
|
.customizeDescription('Invalid version id specified'), null);
|
||||||
|
}
|
||||||
|
metadataValParams.versionId = reqVersionId;
|
||||||
|
}
|
||||||
|
|
||||||
const possibleGrants = ['FULL_CONTROL', 'WRITE_ACP', 'READ', 'READ_ACP'];
|
const possibleGrants = ['FULL_CONTROL', 'WRITE_ACP', 'READ', 'READ_ACP'];
|
||||||
const addACLParams = {
|
const addACLParams = {
|
||||||
Canned: '',
|
Canned: '',
|
||||||
|
@ -88,11 +101,42 @@ export default function objectPutACL(authInfo, request, log, cb) {
|
||||||
request.headers['x-amz-grant-full-control'], 'FULL_CONTROL');
|
request.headers['x-amz-grant-full-control'], 'FULL_CONTROL');
|
||||||
|
|
||||||
return async.waterfall([
|
return async.waterfall([
|
||||||
next => services.metadataValidateAuthorization(metadataValParams, next),
|
next => services.metadataValidateAuthorization(metadataValParams,
|
||||||
(bucket, objectMD, next) => {
|
(err, bucket, objectMD) => {
|
||||||
if (!objectMD) {
|
if (err) {
|
||||||
return next(errors.NoSuchKey, bucket);
|
return next(err);
|
||||||
}
|
}
|
||||||
|
if (!objectMD) {
|
||||||
|
const err = reqVersionId ? errors.NoSuchVersion :
|
||||||
|
errors.NoSuchKey;
|
||||||
|
return next(err, bucket);
|
||||||
|
}
|
||||||
|
if (!reqVersionId) {
|
||||||
|
return next(null, bucket, objectMD);
|
||||||
|
}
|
||||||
|
if (reqVersionId !== 'null') {
|
||||||
|
return next(null, bucket, objectMD);
|
||||||
|
}
|
||||||
|
if (objectMD.isNull || (objectMD && !objectMD.versionId)) {
|
||||||
|
return next(null, bucket, objectMD);
|
||||||
|
}
|
||||||
|
if (objectMD.nullVersionId === undefined) {
|
||||||
|
return next(errors.NoSuchVersion, bucket);
|
||||||
|
}
|
||||||
|
metadataValParams.versionId = objectMD.nullVersionId;
|
||||||
|
return services.metadataValidateAuthorization(metadataValParams,
|
||||||
|
(err, bucket, objectMD) => {
|
||||||
|
if (err) {
|
||||||
|
return next(err);
|
||||||
|
}
|
||||||
|
if (!objectMD) {
|
||||||
|
return next(errors.NoSuchVersion, bucket);
|
||||||
|
}
|
||||||
|
return next(null, bucket, objectMD);
|
||||||
|
});
|
||||||
|
}),
|
||||||
|
(bucket, objectMD, next) => {
|
||||||
|
metadataValParams.versionId = objectMD.versionId;
|
||||||
// If not setting acl through headers, parse body
|
// If not setting acl through headers, parse body
|
||||||
let jsonGrants;
|
let jsonGrants;
|
||||||
let aclOwnerID;
|
let aclOwnerID;
|
||||||
|
@ -241,23 +285,36 @@ export default function objectPutACL(authInfo, request, log, cb) {
|
||||||
},
|
},
|
||||||
function waterfall4(bucket, objectMD, ACLParams, next) {
|
function waterfall4(bucket, objectMD, ACLParams, next) {
|
||||||
// Add acl's to object metadata
|
// Add acl's to object metadata
|
||||||
acl.addObjectACL(bucket, objectKey, objectMD, ACLParams, log, next);
|
const params = metadataValParams.versionId ?
|
||||||
|
{ versionId: metadataValParams.versionId } : {};
|
||||||
|
acl.addObjectACL(bucket, objectKey, objectMD,
|
||||||
|
ACLParams, params, log, err => next(err, bucket, objectMD));
|
||||||
},
|
},
|
||||||
], (err, bucket) => {
|
], (err, bucket, objectMD) => {
|
||||||
const corsHeaders = collectCorsHeaders(request.headers.origin,
|
const resHeaders = collectCorsHeaders(request.headers.origin,
|
||||||
request.method, bucket);
|
request.method, bucket);
|
||||||
if (err) {
|
if (err) {
|
||||||
log.trace('error processing request', {
|
log.trace('error processing request', {
|
||||||
error: err,
|
error: err,
|
||||||
method: 'objectPutACL',
|
method: 'objectPutACL',
|
||||||
});
|
});
|
||||||
return cb(err, corsHeaders);
|
return cb(err, resHeaders);
|
||||||
|
}
|
||||||
|
// if versioning is enabled or suspended, return version id in
|
||||||
|
// response headers
|
||||||
|
if (bucket.getVersioningConfiguration()) {
|
||||||
|
if (objectMD.isNull || (objectMD && !objectMD.versionId)) {
|
||||||
|
resHeaders['x-amz-version-id'] = 'null';
|
||||||
|
} else {
|
||||||
|
resHeaders['x-amz-version-id'] =
|
||||||
|
VID.encrypt(objectMD.versionId);
|
||||||
|
}
|
||||||
}
|
}
|
||||||
log.trace('processed request successfully in object put acl api');
|
log.trace('processed request successfully in object put acl api');
|
||||||
pushMetric('putObjectAcl', log, {
|
pushMetric('putObjectAcl', log, {
|
||||||
authInfo,
|
authInfo,
|
||||||
bucket: bucketName,
|
bucket: bucketName,
|
||||||
});
|
});
|
||||||
return cb(null, corsHeaders);
|
return cb(null, resHeaders);
|
||||||
});
|
});
|
||||||
}
|
}
|
||||||
|
|
|
@ -1,5 +1,5 @@
|
||||||
import async from 'async';
|
import async from 'async';
|
||||||
import { errors } from 'arsenal';
|
import { errors, versioning } from 'arsenal';
|
||||||
|
|
||||||
import collectCorsHeaders from '../utilities/collectCorsHeaders';
|
import collectCorsHeaders from '../utilities/collectCorsHeaders';
|
||||||
import constants from '../../constants';
|
import constants from '../../constants';
|
||||||
|
@ -12,6 +12,8 @@ import services from '../services';
|
||||||
import setUpCopyLocator from './apiUtils/object/setUpCopyLocator';
|
import setUpCopyLocator from './apiUtils/object/setUpCopyLocator';
|
||||||
import validateHeaders from '../utilities/validateHeaders';
|
import validateHeaders from '../utilities/validateHeaders';
|
||||||
|
|
||||||
|
const VID = versioning.VersionID;
|
||||||
|
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* PUT Part Copy during a multipart upload.
|
* PUT Part Copy during a multipart upload.
|
||||||
|
@ -21,13 +23,14 @@ import validateHeaders from '../utilities/validateHeaders';
|
||||||
* includes normalized headers
|
* includes normalized headers
|
||||||
* @param {string} sourceBucket - name of source bucket for object copy
|
* @param {string} sourceBucket - name of source bucket for object copy
|
||||||
* @param {string} sourceObject - name of source object for object copy
|
* @param {string} sourceObject - name of source object for object copy
|
||||||
|
* @param {string} sourceVersionId - versionId of the source object for copy
|
||||||
* @param {object} log - the request logger
|
* @param {object} log - the request logger
|
||||||
* @param {function} callback - final callback to call with the result
|
* @param {function} callback - final callback to call with the result
|
||||||
* @return {undefined}
|
* @return {undefined}
|
||||||
*/
|
*/
|
||||||
export default
|
export default
|
||||||
function objectPutCopyPart(authInfo, request, sourceBucket,
|
function objectPutCopyPart(authInfo, request, sourceBucket,
|
||||||
sourceObject, log, callback) {
|
sourceObject, sourceVersionId, log, callback) {
|
||||||
log.debug('processing request', { method: 'objectPutCopyPart' });
|
log.debug('processing request', { method: 'objectPutCopyPart' });
|
||||||
const destBucketName = request.bucketName;
|
const destBucketName = request.bucketName;
|
||||||
const destObjectKey = request.objectKey;
|
const destObjectKey = request.objectKey;
|
||||||
|
@ -35,6 +38,7 @@ function objectPutCopyPart(authInfo, request, sourceBucket,
|
||||||
authInfo,
|
authInfo,
|
||||||
bucketName: sourceBucket,
|
bucketName: sourceBucket,
|
||||||
objectKey: sourceObject,
|
objectKey: sourceObject,
|
||||||
|
versionId: sourceVersionId,
|
||||||
requestType: 'objectGet',
|
requestType: 'objectGet',
|
||||||
log,
|
log,
|
||||||
};
|
};
|
||||||
|
@ -322,6 +326,10 @@ function objectPutCopyPart(authInfo, request, sourceBucket,
|
||||||
serverSideEncryption.masterKeyId;
|
serverSideEncryption.masterKeyId;
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
if (sourceVersionId) {
|
||||||
|
additionalHeaders['x-amz-copy-source-version-id'] =
|
||||||
|
VID.encrypt(sourceVersionId);
|
||||||
|
}
|
||||||
// TODO push metric for objectPutCopyPart
|
// TODO push metric for objectPutCopyPart
|
||||||
// pushMetric('putObjectCopyPart', log, {
|
// pushMetric('putObjectCopyPart', log, {
|
||||||
// bucket: destBucketName,
|
// bucket: destBucketName,
|
||||||
|
|
|
@ -0,0 +1,88 @@
|
||||||
|
// VersionID format:
|
||||||
|
// timestamp sequential_position site_id other_information
|
||||||
|
// where:
|
||||||
|
// - timestamp 14 bytes epoch in ms (good untill 5138)
|
||||||
|
// - sequential_position 06 bytes position in the ms slot (1B ops)
|
||||||
|
// - site_id 05 bytes site identifier (like PARIS)
|
||||||
|
// - other_information arbitrary user input, such as a unique string
|
||||||
|
|
||||||
|
// the lengths of the components in bytes
|
||||||
|
const LENGTH_TS = 14; // timestamp: epoch in ms
|
||||||
|
const LENGTH_SQ = 6; // position in ms slot
|
||||||
|
const LENGTH_ST = 5; // site identifier
|
||||||
|
|
||||||
|
// empty string template for the variables in a versionId
|
||||||
|
const TEMPLATE_TS = new Array(LENGTH_TS + 1).join('0');
|
||||||
|
const TEMPLATE_SQ = new Array(LENGTH_SQ + 1).join('0');
|
||||||
|
const TEMPLATE_ST = new Array(LENGTH_ST + 1).join(' ');
|
||||||
|
|
||||||
|
// site identifier, like PARIS, TOKYO; will be trimmed if exceeding max length
|
||||||
|
const SITE_ID = `${process.env.SITE_ID}${TEMPLATE_ST}`.slice(0, LENGTH_ST);
|
||||||
|
|
||||||
|
// constants for max epoch and max sequential number in the same epoch
|
||||||
|
const MAX_TS = Math.pow(10, LENGTH_TS) - 1; // good until 16 Nov 5138
|
||||||
|
const MAX_SQ = Math.pow(10, LENGTH_SQ) - 1; // good for 1 billion ops
|
||||||
|
|
||||||
|
// the earliest versionId, used for versions before versioning
|
||||||
|
const VID_INF = `${TEMPLATE_TS}${MAX_TS}`.slice(-LENGTH_TS) +
|
||||||
|
`${TEMPLATE_SQ}${MAX_SQ}`.slice(-LENGTH_SQ) + SITE_ID;
|
||||||
|
|
||||||
|
// internal state of the module
|
||||||
|
let prvts = 0; // epoch of the last versionId
|
||||||
|
let prvsq = 0; // sequential number of the last versionId
|
||||||
|
|
||||||
|
/**
|
||||||
|
* This function ACTIVELY (wastes CPU cycles and) waits for an amount of time
|
||||||
|
* before returning to the caller. This should not be used frequently.
|
||||||
|
*
|
||||||
|
* @param {Number} span - time to wait in nanoseconds (1/1000000 millisecond)
|
||||||
|
* @return {Undefined} - nothing
|
||||||
|
*/
|
||||||
|
function wait(span) {
|
||||||
|
function getspan(diff) {
|
||||||
|
return diff[0] * 1e9 + diff[1];
|
||||||
|
}
|
||||||
|
const start = process.hrtime();
|
||||||
|
while (getspan(process.hrtime(start)) < span);
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* This function returns a "versionId" string indicating the current time as a
|
||||||
|
* combination of the current time in millisecond, the position of the request
|
||||||
|
* in that millisecond, and the identifier of the local site (which could be
|
||||||
|
* datacenter, region, or server depending on the notion of geographics). This
|
||||||
|
* function is stateful which means it keeps some values in the memory and the
|
||||||
|
* next call depends on the previous call.
|
||||||
|
*
|
||||||
|
* @param {string} info - the additional info to ensure uniqueness if desired
|
||||||
|
* @return {string} - the formated versionId string
|
||||||
|
*/
|
||||||
|
function generateVersionId(info) {
|
||||||
|
// Need to wait for the millisecond slot got "flushed". We wait for
|
||||||
|
// only a single millisecond when the module is restarted, which is
|
||||||
|
// necessary for the correctness of the system. This is therefore cheap.
|
||||||
|
if (prvts === 0) {
|
||||||
|
wait(1000000);
|
||||||
|
}
|
||||||
|
// get the present epoch (in millisecond)
|
||||||
|
const ts = Date.now();
|
||||||
|
// A bit more rationale: why do we use a sequence number instead of using
|
||||||
|
// process.hrtime which gives us time in nanoseconds? The idea is that at
|
||||||
|
// any time resolution, some concurrent requests may have the same time due
|
||||||
|
// to the way the OS is queueing requests or getting clock cycles. Our
|
||||||
|
// approach however will give the time based on the position of a request
|
||||||
|
// in the queue for the same millisecond which is supposed to be unique.
|
||||||
|
|
||||||
|
// increase the position if this request is in the same epoch
|
||||||
|
prvsq = (prvts === ts) ? prvsq + 1 : 0;
|
||||||
|
prvts = ts;
|
||||||
|
|
||||||
|
// In the default cases, we reverse the chronological order of the
|
||||||
|
// timestamps so that all versions of an object can be retrieved in the
|
||||||
|
// reversed chronological order---newest versions first. This is because of
|
||||||
|
// the limitation of leveldb for listing keys in the reverse order.
|
||||||
|
return `${TEMPLATE_TS}${MAX_TS - prvts}`.slice(-LENGTH_TS) +
|
||||||
|
`${TEMPLATE_SQ}${MAX_SQ - prvsq}`.slice(-LENGTH_SQ) + SITE_ID + info;
|
||||||
|
}
|
||||||
|
|
||||||
|
module.exports = { generateVersionId, VID_INF };
|
|
@ -12,7 +12,7 @@ const acl = {
|
||||||
metadata.updateBucket(bucket.getName(), bucket, log, cb);
|
metadata.updateBucket(bucket.getName(), bucket, log, cb);
|
||||||
},
|
},
|
||||||
|
|
||||||
addObjectACL(bucket, objectKey, objectMD, addACLParams, log, cb, params) {
|
addObjectACL(bucket, objectKey, objectMD, addACLParams, params, log, cb) {
|
||||||
log.trace('updating object acl in metadata');
|
log.trace('updating object acl in metadata');
|
||||||
// eslint-disable-next-line no-param-reassign
|
// eslint-disable-next-line no-param-reassign
|
||||||
objectMD.acl = addACLParams;
|
objectMD.acl = addACLParams;
|
||||||
|
|
|
@ -411,36 +411,6 @@ class BucketFileInterface {
|
||||||
});
|
});
|
||||||
}
|
}
|
||||||
|
|
||||||
/**
|
|
||||||
* This function checks if params have a property name
|
|
||||||
* If there is add it to the finalParams
|
|
||||||
* Else do nothing
|
|
||||||
* @param {String} name - The parameter name
|
|
||||||
* @param {Object} params - The params to search
|
|
||||||
* @param {Object} extParams - The params sent to the extension
|
|
||||||
* @return {undefined}
|
|
||||||
*/
|
|
||||||
addExtensionParam(name, params, extParams) {
|
|
||||||
if (params.hasOwnProperty(name)) {
|
|
||||||
// eslint-disable-next-line no-param-reassign
|
|
||||||
extParams[name] = params[name];
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Used for advancing the last character of a string for setting upper/lower
|
|
||||||
* bounds
|
|
||||||
* For e.g., _setCharAt('demo1') results in 'demo2',
|
|
||||||
* _setCharAt('scality') results in 'scalitz'
|
|
||||||
* @param {String} str - string to be advanced
|
|
||||||
* @return {String} - modified string
|
|
||||||
*/
|
|
||||||
_setCharAt(str) {
|
|
||||||
let chr = str.charCodeAt(str.length - 1);
|
|
||||||
chr = String.fromCharCode(chr + 1);
|
|
||||||
return str.substr(0, str.length - 1) + chr;
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* This complex function deals with different extensions of bucket listing:
|
* This complex function deals with different extensions of bucket listing:
|
||||||
* Delimiter based search or MPU based search.
|
* Delimiter based search or MPU based search.
|
||||||
|
@ -451,39 +421,9 @@ class BucketFileInterface {
|
||||||
* @return {undefined}
|
* @return {undefined}
|
||||||
*/
|
*/
|
||||||
internalListObject(bucketName, params, log, cb) {
|
internalListObject(bucketName, params, log, cb) {
|
||||||
const requestParams = {};
|
const extName = params.listingType || 'Basic';
|
||||||
let Ext;
|
const extension = new arsenal.algorithms.list[extName](params, log);
|
||||||
const extParams = {};
|
const requestParams = extension.genMDParams();
|
||||||
// multipart upload listing
|
|
||||||
if (params.listingType === 'multipartuploads') {
|
|
||||||
Ext = arsenal.algorithms.list.MPU;
|
|
||||||
this.addExtensionParam('queryPrefixLength', params, extParams);
|
|
||||||
this.addExtensionParam('splitter', params, extParams);
|
|
||||||
if (params.keyMarker) {
|
|
||||||
requestParams.gt = `overview${params.splitter}` +
|
|
||||||
`${params.keyMarker}${params.splitter}`;
|
|
||||||
if (params.uploadIdMarker) {
|
|
||||||
requestParams.gt += `${params.uploadIdMarker}`;
|
|
||||||
}
|
|
||||||
// advance so that lower bound does not include the supplied
|
|
||||||
// markers
|
|
||||||
requestParams.gt = this._setCharAt(requestParams.gt);
|
|
||||||
}
|
|
||||||
} else {
|
|
||||||
Ext = arsenal.algorithms.list.Delimiter;
|
|
||||||
if (params.marker) {
|
|
||||||
requestParams.gt = params.marker;
|
|
||||||
this.addExtensionParam('gt', requestParams, extParams);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
this.addExtensionParam('delimiter', params, extParams);
|
|
||||||
this.addExtensionParam('maxKeys', params, extParams);
|
|
||||||
if (params.prefix) {
|
|
||||||
requestParams.start = params.prefix;
|
|
||||||
requestParams.lt = this._setCharAt(params.prefix);
|
|
||||||
this.addExtensionParam('start', requestParams, extParams);
|
|
||||||
}
|
|
||||||
const extension = new Ext(extParams, log);
|
|
||||||
this.loadDBIfExists(bucketName, log, (err, db) => {
|
this.loadDBIfExists(bucketName, log, (err, db) => {
|
||||||
if (err) {
|
if (err) {
|
||||||
return cb(err);
|
return cb(err);
|
||||||
|
|
|
@ -1,9 +1,25 @@
|
||||||
import { errors, algorithms } from 'arsenal';
|
import { errors, algorithms, versioning } from 'arsenal';
|
||||||
|
|
||||||
import getMultipartUploadListing from './getMultipartUploadListing';
|
import getMultipartUploadListing from './getMultipartUploadListing';
|
||||||
import { metadata } from './metadata';
|
import { metadata } from './metadata';
|
||||||
|
|
||||||
|
const genVID = versioning.VersionID.generateVersionId;
|
||||||
|
|
||||||
const defaultMaxKeys = 1000;
|
const defaultMaxKeys = 1000;
|
||||||
|
let uidCounter = 0;
|
||||||
|
|
||||||
|
function generateVersionId() {
|
||||||
|
return genVID(uidCounter++);
|
||||||
|
}
|
||||||
|
|
||||||
|
function formatVersionKey(key, versionId) {
|
||||||
|
return `${key}\0${versionId}`;
|
||||||
|
}
|
||||||
|
|
||||||
|
function inc(str) {
|
||||||
|
return str ? (str.slice(0, str.length - 1) +
|
||||||
|
String.fromCharCode(str.charCodeAt(str.length - 1) + 1)) : str;
|
||||||
|
}
|
||||||
|
|
||||||
const metastore = {
|
const metastore = {
|
||||||
createBucket: (bucketName, bucketMD, log, cb) => {
|
createBucket: (bucketName, bucketMD, log, cb) => {
|
||||||
|
@ -64,12 +80,36 @@ const metastore = {
|
||||||
process.nextTick(() => {
|
process.nextTick(() => {
|
||||||
metastore.getBucketAttributes(bucketName, log, err => {
|
metastore.getBucketAttributes(bucketName, log, err => {
|
||||||
// TODO: implement versioning for in-memory backend
|
// TODO: implement versioning for in-memory backend
|
||||||
const data = undefined;
|
|
||||||
if (err) {
|
if (err) {
|
||||||
return cb(err);
|
return cb(err);
|
||||||
}
|
}
|
||||||
|
if (params && params.versioning) {
|
||||||
|
const versionId = generateVersionId();
|
||||||
|
objVal.versionId = versionId; // eslint-disable-line
|
||||||
|
metadata.keyMaps.get(bucketName).set(objName, objVal);
|
||||||
|
// eslint-disable-next-line
|
||||||
|
objName = formatVersionKey(objName, versionId);
|
||||||
|
metadata.keyMaps.get(bucketName).set(objName, objVal);
|
||||||
|
return cb(null, `{"versionId":"${versionId}"}`);
|
||||||
|
}
|
||||||
|
if (params && params.versionId === '') {
|
||||||
|
const versionId = generateVersionId();
|
||||||
|
objVal.versionId = versionId; // eslint-disable-line
|
||||||
|
metadata.keyMaps.get(bucketName).set(objName, objVal);
|
||||||
|
return cb(null, `{"versionId":"${objVal.versionId}"}`);
|
||||||
|
} else if (params && params.versionId) {
|
||||||
|
objVal.versionId = params.versionId; // eslint-disable-line
|
||||||
|
const mst = metadata.keyMaps.get(bucketName).get(objName);
|
||||||
|
if (mst && mst.versionId === params.versionId) {
|
||||||
|
metadata.keyMaps.get(bucketName).set(objName, objVal);
|
||||||
|
}
|
||||||
|
// eslint-disable-next-line
|
||||||
|
objName = formatVersionKey(objName, params.versionId);
|
||||||
|
metadata.keyMaps.get(bucketName).set(objName, objVal);
|
||||||
|
return cb(null, `{"versionId":"${objVal.versionId}"}`);
|
||||||
|
}
|
||||||
metadata.keyMaps.get(bucketName).set(objName, objVal);
|
metadata.keyMaps.get(bucketName).set(objName, objVal);
|
||||||
return cb(err, data);
|
return cb(null);
|
||||||
});
|
});
|
||||||
});
|
});
|
||||||
},
|
},
|
||||||
|
@ -80,6 +120,10 @@ const metastore = {
|
||||||
if (err) {
|
if (err) {
|
||||||
return cb(err, { bucket });
|
return cb(err, { bucket });
|
||||||
}
|
}
|
||||||
|
if (params && params.versionId) {
|
||||||
|
// eslint-disable-next-line
|
||||||
|
objName = formatVersionKey(objName, params.versionId);
|
||||||
|
}
|
||||||
if (!metadata.keyMaps.has(bucketName)
|
if (!metadata.keyMaps.has(bucketName)
|
||||||
|| !metadata.keyMaps.get(bucketName).has(objName)) {
|
|| !metadata.keyMaps.get(bucketName).has(objName)) {
|
||||||
return cb(null, { bucket: bucket.serialize() });
|
return cb(null, { bucket: bucket.serialize() });
|
||||||
|
@ -100,6 +144,10 @@ const metastore = {
|
||||||
if (err) {
|
if (err) {
|
||||||
return cb(err);
|
return cb(err);
|
||||||
}
|
}
|
||||||
|
if (params && params.versionId) {
|
||||||
|
// eslint-disable-next-line
|
||||||
|
objName = formatVersionKey(objName, params.versionId);
|
||||||
|
}
|
||||||
if (!metadata.keyMaps.has(bucketName)
|
if (!metadata.keyMaps.has(bucketName)
|
||||||
|| !metadata.keyMaps.get(bucketName).has(objName)) {
|
|| !metadata.keyMaps.get(bucketName).has(objName)) {
|
||||||
return cb(errors.NoSuchKey);
|
return cb(errors.NoSuchKey);
|
||||||
|
@ -118,6 +166,29 @@ const metastore = {
|
||||||
if (!metadata.keyMaps.get(bucketName).has(objName)) {
|
if (!metadata.keyMaps.get(bucketName).has(objName)) {
|
||||||
return cb(errors.NoSuchKey);
|
return cb(errors.NoSuchKey);
|
||||||
}
|
}
|
||||||
|
if (params && params.versionId) {
|
||||||
|
const baseKey = inc(formatVersionKey(objName, ''));
|
||||||
|
const vobjName = formatVersionKey(objName,
|
||||||
|
params.versionId);
|
||||||
|
metadata.keyMaps.get(bucketName).delete(vobjName);
|
||||||
|
const mst = metadata.keyMaps.get(bucketName).get(objName);
|
||||||
|
if (mst.versionId === params.versionId) {
|
||||||
|
const keys = [];
|
||||||
|
metadata.keyMaps.get(bucketName).forEach((val, key) => {
|
||||||
|
if (key < baseKey && key > vobjName) {
|
||||||
|
keys.push(key);
|
||||||
|
}
|
||||||
|
});
|
||||||
|
if (keys.length === 0) {
|
||||||
|
metadata.keyMaps.get(bucketName).delete(objName);
|
||||||
|
return cb();
|
||||||
|
}
|
||||||
|
const key = keys.sort()[0];
|
||||||
|
const value = metadata.keyMaps.get(bucketName).get(key);
|
||||||
|
metadata.keyMaps.get(bucketName).set(objName, value);
|
||||||
|
}
|
||||||
|
return cb();
|
||||||
|
}
|
||||||
metadata.keyMaps.get(bucketName).delete(objName);
|
metadata.keyMaps.get(bucketName).delete(objName);
|
||||||
return cb();
|
return cb();
|
||||||
});
|
});
|
||||||
|
@ -161,22 +232,30 @@ const metastore = {
|
||||||
if (!metadata.keyMaps.has(bucketName)) {
|
if (!metadata.keyMaps.has(bucketName)) {
|
||||||
return cb(errors.NoSuchBucket);
|
return cb(errors.NoSuchBucket);
|
||||||
}
|
}
|
||||||
const keys = [];
|
|
||||||
metadata.keyMaps.get(bucketName).forEach((val, key) => {
|
|
||||||
if (marker === undefined || key > marker) {
|
|
||||||
keys.push(key);
|
|
||||||
}
|
|
||||||
});
|
|
||||||
keys.sort();
|
|
||||||
// If marker specified, edit the keys array so it
|
// If marker specified, edit the keys array so it
|
||||||
// only contains keys that occur alphabetically after the marker
|
// only contains keys that occur alphabetically after the marker
|
||||||
const filterParameters = {
|
const listingType = params.listingType || 'Delimiter';
|
||||||
delimiter,
|
const extension = new algorithms.list[listingType](params, log);
|
||||||
start: prefix,
|
const listingParams = extension.genMDParams();
|
||||||
maxKeys: numKeys,
|
|
||||||
gt: marker,
|
const keys = [];
|
||||||
};
|
metadata.keyMaps.get(bucketName).forEach((val, key) => {
|
||||||
const Ext = new algorithms.list.Delimiter(filterParameters, log);
|
if (listingParams.gt && listingParams.gt >= key) {
|
||||||
|
return null;
|
||||||
|
}
|
||||||
|
if (listingParams.gte && listingParams.gte > key) {
|
||||||
|
return null;
|
||||||
|
}
|
||||||
|
if (listingParams.lt && key >= listingParams.lt) {
|
||||||
|
return null;
|
||||||
|
}
|
||||||
|
if (listingParams.lte && key > listingParams.lte) {
|
||||||
|
return null;
|
||||||
|
}
|
||||||
|
return keys.push(key);
|
||||||
|
});
|
||||||
|
keys.sort();
|
||||||
|
|
||||||
// Iterate through keys array and filter keys containing
|
// Iterate through keys array and filter keys containing
|
||||||
// delimiter into response.CommonPrefixes and filter remaining
|
// delimiter into response.CommonPrefixes and filter remaining
|
||||||
|
@ -198,11 +277,11 @@ const metastore = {
|
||||||
// not filtered.
|
// not filtered.
|
||||||
// Also, Ext.filter returns false when hit max keys.
|
// Also, Ext.filter returns false when hit max keys.
|
||||||
// What a nifty function!
|
// What a nifty function!
|
||||||
if (!Ext.filter(obj)) {
|
if (extension.filter(obj) < 0) {
|
||||||
break;
|
break;
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
return cb(null, Ext.result());
|
return cb(null, extension.result());
|
||||||
});
|
});
|
||||||
},
|
},
|
||||||
|
|
||||||
|
|
|
@ -125,17 +125,47 @@ const metadata = {
|
||||||
},
|
},
|
||||||
|
|
||||||
listObject: (bucketName, listingParams, log, cb) => {
|
listObject: (bucketName, listingParams, log, cb) => {
|
||||||
client
|
if (listingParams.listingType === undefined) {
|
||||||
.listObject(bucketName, listingParams,
|
// eslint-disable-next-line
|
||||||
log, (err, data) => {
|
listingParams.listingType = 'Delimiter';
|
||||||
log.debug('getting object listing from metadata');
|
}
|
||||||
if (err) {
|
client.listObject(bucketName, listingParams, log, (err, data) => {
|
||||||
log.debug('error from metadata', { implName, err });
|
log.debug('getting object listing from metadata');
|
||||||
return cb(err);
|
if (err) {
|
||||||
}
|
log.debug('error from metadata', { implName, err });
|
||||||
log.debug('object listing retrieved from metadata');
|
return cb(err);
|
||||||
return cb(err, data);
|
}
|
||||||
});
|
log.debug('object listing retrieved from metadata');
|
||||||
|
if (listingParams.listingType === 'DelimiterVersions') {
|
||||||
|
return cb(err, data);
|
||||||
|
}
|
||||||
|
// eslint-disable-next-line
|
||||||
|
data.Contents = data.Contents.map(entry => {
|
||||||
|
const tmp = JSON.parse(entry.value);
|
||||||
|
return {
|
||||||
|
key: entry.key,
|
||||||
|
value: {
|
||||||
|
Size: tmp['content-length'],
|
||||||
|
ETag: tmp['content-md5'],
|
||||||
|
VersionId: tmp.versionId,
|
||||||
|
IsNull: tmp.isNull,
|
||||||
|
IsDeleteMarker: tmp.isDeleteMarker,
|
||||||
|
LastModified: tmp['last-modified'],
|
||||||
|
Owner: {
|
||||||
|
DisplayName: tmp['owner-display-name'],
|
||||||
|
ID: tmp['owner-id'],
|
||||||
|
},
|
||||||
|
StorageClass: tmp['x-amz-storage-class'],
|
||||||
|
Initiated: tmp.initiated,
|
||||||
|
Initiator: tmp.initiator,
|
||||||
|
EventualStorageBucket: tmp.eventualStorageBucket,
|
||||||
|
partLocations: tmp.partLocations,
|
||||||
|
creationDate: tmp.creationDate,
|
||||||
|
},
|
||||||
|
};
|
||||||
|
});
|
||||||
|
return cb(err, data);
|
||||||
|
});
|
||||||
},
|
},
|
||||||
|
|
||||||
listMultipartUploads: (bucketName, listingParams, log, cb) => {
|
listMultipartUploads: (bucketName, listingParams, log, cb) => {
|
||||||
|
|
|
@ -93,7 +93,7 @@ export default function routePUT(request, response, log, statsClient) {
|
||||||
if (mfaDelete) {
|
if (mfaDelete) {
|
||||||
log.debug('mfa deletion is not implemented');
|
log.debug('mfa deletion is not implemented');
|
||||||
return routesUtils.responseNoBody(
|
return routesUtils.responseNoBody(
|
||||||
errors.NotImplemented.customizedDescription(
|
errors.NotImplemented.customizeDescription(
|
||||||
'MFA Deletion is not supported yet.'), null,
|
'MFA Deletion is not supported yet.'), null,
|
||||||
response, null, log);
|
response, null, log);
|
||||||
}
|
}
|
||||||
|
@ -256,16 +256,14 @@ export default function routePUT(request, response, log, statsClient) {
|
||||||
});
|
});
|
||||||
|
|
||||||
api.callApiMethod('objectPut', request, log,
|
api.callApiMethod('objectPut', request, log,
|
||||||
(err, contentMD5, corsHeaders) => {
|
(err, res, corsHeaders) => { // eslint-disable-line
|
||||||
if (err) {
|
if (err) {
|
||||||
return routesUtils.responseNoBody(err, corsHeaders,
|
return routesUtils.responseNoBody(err, corsHeaders,
|
||||||
response, 200, log);
|
response, 200, log);
|
||||||
}
|
}
|
||||||
// ETag's hex should always be enclosed in quotes
|
// ETag's hex should always be enclosed in quotes
|
||||||
statsReport500(err, statsClient);
|
statsReport500(err, statsClient);
|
||||||
const resMetaHeaders = corsHeaders || {};
|
return routesUtils.responseNoBody(err, corsHeaders,
|
||||||
resMetaHeaders.ETag = `"${contentMD5}"`;
|
|
||||||
return routesUtils.responseNoBody(err, resMetaHeaders,
|
|
||||||
response, 200, log);
|
response, 200, log);
|
||||||
});
|
});
|
||||||
}
|
}
|
||||||
|
|
|
@ -101,11 +101,19 @@ function errorXMLResponse(errCode, response, log, corsHeaders) {
|
||||||
log.addDefaultFields({
|
log.addDefaultFields({
|
||||||
bytesSent,
|
bytesSent,
|
||||||
});
|
});
|
||||||
|
if (corsHeaders) {
|
||||||
|
// eslint-disable-next-line no-param-reassign
|
||||||
|
corsHeaders['Content-Type'] = 'application/xml';
|
||||||
|
// eslint-disable-next-line no-param-reassign
|
||||||
|
corsHeaders['Content-Length'] = xmlStr.length;
|
||||||
|
}
|
||||||
setCommonResponseHeaders(corsHeaders, response, log);
|
setCommonResponseHeaders(corsHeaders, response, log);
|
||||||
response.writeHead(errCode.code, { 'Content-type': 'application/xml' });
|
response.writeHead(errCode.code, { 'Content-type': 'application/xml' });
|
||||||
return response.end(xmlStr, 'utf8', () => {
|
return response.end(xmlStr, 'utf8', () => {
|
||||||
log.end().info('responded with error XML', {
|
log.end().info('responded with error XML', {
|
||||||
httpCode: response.statusCode,
|
httpCode: response.statusCode,
|
||||||
|
xmlStr,
|
||||||
|
corsHeaders,
|
||||||
});
|
});
|
||||||
});
|
});
|
||||||
}
|
}
|
||||||
|
|
283
lib/services.js
283
lib/services.js
|
@ -1,7 +1,7 @@
|
||||||
import assert from 'assert';
|
import assert from 'assert';
|
||||||
|
|
||||||
import async from 'async';
|
import async from 'async';
|
||||||
import { errors } from 'arsenal';
|
import { errors, versioning } from 'arsenal';
|
||||||
|
|
||||||
import BucketInfo from './metadata/BucketInfo';
|
import BucketInfo from './metadata/BucketInfo';
|
||||||
import bucketShield from './api/apiUtils/bucket/bucketShield';
|
import bucketShield from './api/apiUtils/bucket/bucketShield';
|
||||||
|
@ -14,10 +14,11 @@ import metadata from './metadata/wrapper';
|
||||||
import { logger } from './utilities/logger';
|
import { logger } from './utilities/logger';
|
||||||
import removeAWSChunked from './api/apiUtils/object/removeAWSChunked';
|
import removeAWSChunked from './api/apiUtils/object/removeAWSChunked';
|
||||||
|
|
||||||
|
const VID_INF = versioning.VersionID.VID_INF;
|
||||||
|
|
||||||
const usersBucket = constants.usersBucket;
|
const usersBucket = constants.usersBucket;
|
||||||
const oldUsersBucket = constants.oldUsersBucket;
|
const oldUsersBucket = constants.oldUsersBucket;
|
||||||
|
|
||||||
|
|
||||||
export default {
|
export default {
|
||||||
getService(authInfo, request, log, splitter, cb, overrideUserbucket) {
|
getService(authInfo, request, log, splitter, cb, overrideUserbucket) {
|
||||||
const canonicalID = authInfo.getCanonicalID();
|
const canonicalID = authInfo.getCanonicalID();
|
||||||
|
@ -99,8 +100,12 @@ export default {
|
||||||
return cb(null, bucket, null);
|
return cb(null, bucket, null);
|
||||||
});
|
});
|
||||||
}
|
}
|
||||||
return metadata.getBucketAndObjectMD(bucketName, objectKey, {}, log,
|
let versionId = params.versionId;
|
||||||
(err, data) => {
|
if (versionId === 'null') {
|
||||||
|
versionId = undefined;
|
||||||
|
}
|
||||||
|
return metadata.getBucketAndObjectMD(bucketName, objectKey,
|
||||||
|
{ versionId }, log, (err, data) => {
|
||||||
if (err) {
|
if (err) {
|
||||||
log.debug('metadata get failed', { error: err });
|
log.debug('metadata get failed', { error: err });
|
||||||
return cb(err);
|
return cb(err);
|
||||||
|
@ -130,6 +135,18 @@ export default {
|
||||||
log.trace('Bucket found', { bucketName });
|
log.trace('Bucket found', { bucketName });
|
||||||
return cb(null, bucket, null);
|
return cb(null, bucket, null);
|
||||||
}
|
}
|
||||||
|
if (params.versionId === 'null') {
|
||||||
|
if (obj.nullVersionId && !obj.isNull) {
|
||||||
|
log.debug('null version exists, get the null version');
|
||||||
|
params.versionId = obj.nullVersionId; // eslint-disable-line
|
||||||
|
return this.metadataValidateAuthorization(params, cb);
|
||||||
|
}
|
||||||
|
if (obj.versionId && !obj.isNull) {
|
||||||
|
log.debug('null version does not exists');
|
||||||
|
return cb(null, bucket, null);
|
||||||
|
}
|
||||||
|
// otherwise the master version is the null version
|
||||||
|
}
|
||||||
// TODO: Add bucket policy and IAM checks
|
// TODO: Add bucket policy and IAM checks
|
||||||
if (!isObjAuthorized(bucket, obj, requestType, canonicalID)) {
|
if (!isObjAuthorized(bucket, obj, requestType, canonicalID)) {
|
||||||
log.debug('access denied for user on object', { requestType });
|
log.debug('access denied for user on object', { requestType });
|
||||||
|
@ -171,7 +188,7 @@ export default {
|
||||||
const { objectKey, authInfo, size, contentMD5, metaHeaders,
|
const { objectKey, authInfo, size, contentMD5, metaHeaders,
|
||||||
contentType, cacheControl, contentDisposition, contentEncoding,
|
contentType, cacheControl, contentDisposition, contentEncoding,
|
||||||
expires, multipart, headers, overrideMetadata, log,
|
expires, multipart, headers, overrideMetadata, log,
|
||||||
lastModifiedDate } = params;
|
lastModifiedDate, versioning, versionId } = params;
|
||||||
log.trace('storing object in metadata');
|
log.trace('storing object in metadata');
|
||||||
assert.strictEqual(typeof bucketName, 'string');
|
assert.strictEqual(typeof bucketName, 'string');
|
||||||
const omVal = {};
|
const omVal = {};
|
||||||
|
@ -196,7 +213,7 @@ export default {
|
||||||
omVal['last-modified'] = lastModifiedDate || new Date().toJSON();
|
omVal['last-modified'] = lastModifiedDate || new Date().toJSON();
|
||||||
omVal['content-md5'] = contentMD5;
|
omVal['content-md5'] = contentMD5;
|
||||||
|
|
||||||
omVal['x-amz-server-version-id'] = '';
|
// omVal['x-amz-server-version-id'] = '';
|
||||||
// TODO: Handle this as a utility function for all object puts
|
// TODO: Handle this as a utility function for all object puts
|
||||||
// similar to normalizing request but after checkAuth so
|
// similar to normalizing request but after checkAuth so
|
||||||
// string to sign is not impacted. This is GH Issue#89.
|
// string to sign is not impacted. This is GH Issue#89.
|
||||||
|
@ -215,7 +232,7 @@ export default {
|
||||||
}
|
}
|
||||||
|
|
||||||
// simple/no version. will expand once object versioning is introduced
|
// simple/no version. will expand once object versioning is introduced
|
||||||
omVal['x-amz-version-id'] = 'null';
|
// omVal['x-amz-version-id'] = 'null';
|
||||||
omVal.acl = {
|
omVal.acl = {
|
||||||
Canned: 'private',
|
Canned: 'private',
|
||||||
FULL_CONTROL: [],
|
FULL_CONTROL: [],
|
||||||
|
@ -224,6 +241,20 @@ export default {
|
||||||
READ_ACP: [],
|
READ_ACP: [],
|
||||||
};
|
};
|
||||||
|
|
||||||
|
const options = {};
|
||||||
|
if (versioning) {
|
||||||
|
options.versioning = versioning;
|
||||||
|
}
|
||||||
|
if (versionId || versionId === '') {
|
||||||
|
options.versionId = versionId;
|
||||||
|
}
|
||||||
|
omVal.isNull = params.isNull;
|
||||||
|
omVal.nullVersionId = params.nullVersionId;
|
||||||
|
omVal.isDeleteMarker = params.isDeleteMarker;
|
||||||
|
if (versionId && versionId !== 'null') {
|
||||||
|
omVal.versionId = versionId;
|
||||||
|
}
|
||||||
|
|
||||||
// Store user provided metadata. TODO: limit size.
|
// Store user provided metadata. TODO: limit size.
|
||||||
// For multipart upload this also serves to transfer
|
// For multipart upload this also serves to transfer
|
||||||
// over metadata originally sent with the initiation
|
// over metadata originally sent with the initiation
|
||||||
|
@ -245,41 +276,50 @@ export default {
|
||||||
log.trace('object metadata', { omVal });
|
log.trace('object metadata', { omVal });
|
||||||
// If this is not the completion of a multipart upload
|
// If this is not the completion of a multipart upload
|
||||||
// parse the headers to get the ACL's if any
|
// parse the headers to get the ACL's if any
|
||||||
if (!multipart) {
|
async.waterfall([
|
||||||
const parseAclParams = {
|
callback => {
|
||||||
headers,
|
if (multipart) {
|
||||||
resourceType: 'object',
|
return callback();
|
||||||
acl: omVal.acl,
|
|
||||||
log,
|
|
||||||
};
|
|
||||||
log.trace('parsing acl from headers');
|
|
||||||
acl.parseAclFromHeaders(parseAclParams, (err, parsedACL) => {
|
|
||||||
if (err) {
|
|
||||||
log.debug('error parsing acl', { error: err });
|
|
||||||
return cb(err);
|
|
||||||
}
|
}
|
||||||
omVal.acl = parsedACL;
|
const parseAclParams = {
|
||||||
metadata.putObjectMD(bucketName, objectKey, omVal, {}, log,
|
headers,
|
||||||
err => {
|
resourceType: 'object',
|
||||||
|
acl: omVal.acl,
|
||||||
|
log,
|
||||||
|
};
|
||||||
|
log.trace('parsing acl from headers');
|
||||||
|
acl.parseAclFromHeaders(parseAclParams, (err, parsedACL) => {
|
||||||
if (err) {
|
if (err) {
|
||||||
log.error('error from metadata', { error: err });
|
log.warn('error parsing acl', { error: err });
|
||||||
return cb(err);
|
return callback(err);
|
||||||
}
|
}
|
||||||
log.trace('object successfully stored in metadata');
|
omVal.acl = parsedACL;
|
||||||
return cb(err, contentMD5);
|
return callback();
|
||||||
});
|
});
|
||||||
return undefined;
|
return null;
|
||||||
});
|
},
|
||||||
} else {
|
callback => metadata.putObjectMD(bucketName, objectKey, omVal,
|
||||||
metadata.putObjectMD(bucketName, objectKey, omVal, {}, log, err => {
|
options, log, callback),
|
||||||
if (err) {
|
], (err, data) => {
|
||||||
log.error('error from metadata', { error: err });
|
if (err) {
|
||||||
return cb(err);
|
log.error('error from metadata', { error: err });
|
||||||
|
return cb(err);
|
||||||
|
}
|
||||||
|
log.trace('object successfully stored in metadata');
|
||||||
|
// for versioning only, other features need to process their data
|
||||||
|
|
||||||
|
let versionId = undefined;
|
||||||
|
if (data) {
|
||||||
|
if (params.isNull && params.isDeleteMarker) {
|
||||||
|
versionId = 'null';
|
||||||
|
// TODO: check if for version specific PUT request we want to
|
||||||
|
// return 'null' for versionId
|
||||||
|
} else if (!params.isNull) {
|
||||||
|
versionId = JSON.parse(data).versionId;
|
||||||
}
|
}
|
||||||
log.trace('object successfully stored in metadata');
|
}
|
||||||
return cb(err, contentMD5);
|
return cb(err, { contentMD5, versionId });
|
||||||
});
|
});
|
||||||
}
|
|
||||||
},
|
},
|
||||||
|
|
||||||
/**
|
/**
|
||||||
|
@ -287,37 +327,31 @@ export default {
|
||||||
* @param {string} bucketName - bucket in which objectMD is stored
|
* @param {string} bucketName - bucket in which objectMD is stored
|
||||||
* @param {object} objectMD - object's metadata
|
* @param {object} objectMD - object's metadata
|
||||||
* @param {string} objectKey - object key name
|
* @param {string} objectKey - object key name
|
||||||
|
* @param {object} options - other instructions, such as { versionId } to
|
||||||
|
* delete a specific version of the object
|
||||||
* @param {Log} log - logger instance
|
* @param {Log} log - logger instance
|
||||||
* @param {function} cb - callback from async.waterfall in objectGet
|
* @param {function} cb - callback from async.waterfall in objectGet
|
||||||
* @return {undefined}
|
* @return {undefined}
|
||||||
*/
|
*/
|
||||||
deleteObject(bucketName, objectMD, objectKey, log, cb) {
|
deleteObject(bucketName, objectMD, objectKey, options, log, cb) {
|
||||||
log.trace('deleting object from bucket');
|
log.trace('deleting object from bucket');
|
||||||
assert.strictEqual(typeof bucketName, 'string');
|
assert.strictEqual(typeof bucketName, 'string');
|
||||||
assert.strictEqual(typeof objectMD, 'object');
|
assert.strictEqual(typeof objectMD, 'object');
|
||||||
if (objectMD['x-amz-version-id'] === 'null') {
|
return metadata.deleteObjectMD(bucketName, objectKey, options, log,
|
||||||
log.trace('object identified as non-versioned');
|
(err, res) => {
|
||||||
// non-versioned buckets
|
if (err) {
|
||||||
log.trace('deleteObject: deleting non-versioned object');
|
return cb(err, res);
|
||||||
return metadata.deleteObjectMD(bucketName, objectKey, {}, log,
|
}
|
||||||
err => {
|
cb(null, res); // this is smart
|
||||||
if (err) {
|
log.trace('deleteObject: metadata delete OK');
|
||||||
return cb(err);
|
const deleteLog = logger.newRequestLogger();
|
||||||
}
|
if (objectMD.location === null) {
|
||||||
cb();
|
return undefined;
|
||||||
log.trace('deleteObject: metadata delete OK');
|
} else if (!Array.isArray(objectMD.location)) {
|
||||||
const deleteLog = logger.newRequestLogger();
|
return data.delete(objectMD.location, deleteLog);
|
||||||
if (objectMD.location === null) {
|
}
|
||||||
return undefined;
|
return data.batchDelete(objectMD.location, deleteLog);
|
||||||
} else if (!Array.isArray(objectMD.location)) {
|
});
|
||||||
return data.delete(objectMD.location, deleteLog);
|
|
||||||
}
|
|
||||||
return data.batchDelete(objectMD.location, deleteLog);
|
|
||||||
});
|
|
||||||
}
|
|
||||||
// versioning
|
|
||||||
log.debug('deleteObject: versioning not fully implemented');
|
|
||||||
return metadata.deleteObjectMD(bucketName, objectKey, {}, log, cb);
|
|
||||||
},
|
},
|
||||||
|
|
||||||
/**
|
/**
|
||||||
|
@ -735,4 +769,131 @@ export default {
|
||||||
metadata.deleteObjectMD(mpuBucketName, key, {}, log, callback);
|
metadata.deleteObjectMD(mpuBucketName, key, {}, log, callback);
|
||||||
}, err => cb(err));
|
}, err => cb(err));
|
||||||
},
|
},
|
||||||
|
|
||||||
|
versioningPreprocessing(bucketName, bucketMD, objectKey, objMD,
|
||||||
|
reqVersionId, log, callback) {
|
||||||
|
const options = {};
|
||||||
|
// bucket is not versioning enabled
|
||||||
|
if (!bucketMD.getVersioningConfiguration()) {
|
||||||
|
options.deleteData = true;
|
||||||
|
return callback(null, options);
|
||||||
|
}
|
||||||
|
// bucket is versioning enabled
|
||||||
|
const mstVersionId = objMD ? objMD.versionId : undefined;
|
||||||
|
const mstIsNull = objMD ? objMD.isNull : false;
|
||||||
|
const vstat = bucketMD.getVersioningConfiguration().Status;
|
||||||
|
if (!reqVersionId) {
|
||||||
|
// non-version-specific versioning operation
|
||||||
|
if (mstVersionId === undefined || mstIsNull) {
|
||||||
|
// object does not exist or is not versioned (before versioning)
|
||||||
|
if (vstat === 'Suspended') {
|
||||||
|
// versioning is suspended, overwrite the existing version
|
||||||
|
options.versionId = '';
|
||||||
|
options.isNull = true;
|
||||||
|
options.deleteData = true;
|
||||||
|
return callback(null, options);
|
||||||
|
}
|
||||||
|
// versioning is enabled, create a new version
|
||||||
|
options.versioning = true;
|
||||||
|
if (objMD) {
|
||||||
|
// store master version in a new key
|
||||||
|
const versionId = mstIsNull ? mstVersionId : VID_INF;
|
||||||
|
objMD.versionId = versionId; // eslint-disable-line
|
||||||
|
objMD.isNull = true; // eslint-disable-line
|
||||||
|
options.nullVersionId = versionId;
|
||||||
|
return metadata.putObjectMD(bucketName, objectKey, objMD,
|
||||||
|
{ versionId }, log, err => callback(err, options));
|
||||||
|
}
|
||||||
|
return callback(null, options);
|
||||||
|
}
|
||||||
|
// master is versioned and is not a null version
|
||||||
|
const nullVersionId = objMD.nullVersionId;
|
||||||
|
if (vstat === 'Suspended') {
|
||||||
|
// versioning is suspended, overwrite the existing version
|
||||||
|
options.versionId = '';
|
||||||
|
options.isNull = true;
|
||||||
|
options.deleteNullVersionData = true;
|
||||||
|
if (nullVersionId === undefined) {
|
||||||
|
return callback(null, options);
|
||||||
|
}
|
||||||
|
return metadata.deleteObjectMD(bucketName, objectKey,
|
||||||
|
{ versionId: nullVersionId }, log,
|
||||||
|
err => callback(err, options));
|
||||||
|
}
|
||||||
|
// versioning is enabled, put the new version
|
||||||
|
options.versioning = true;
|
||||||
|
options.nullVersionId = nullVersionId;
|
||||||
|
return callback(null, options);
|
||||||
|
} else if (!mstVersionId) {
|
||||||
|
// version-specific versioning operation, master is not versioned
|
||||||
|
if (vstat === 'Suspended' || reqVersionId === 'null') {
|
||||||
|
// object does not exist or is not versioned (before versioning)
|
||||||
|
options.versionId = '';
|
||||||
|
options.isNull = true;
|
||||||
|
options.deleteData = true;
|
||||||
|
return callback(null, options);
|
||||||
|
}
|
||||||
|
// TODO check AWS behaviour
|
||||||
|
return callback(errors.BadRequest);
|
||||||
|
} else if (mstIsNull) {
|
||||||
|
// master is versioned and is a null version
|
||||||
|
if (reqVersionId === 'null') {
|
||||||
|
// overwrite the existing version, make new version null
|
||||||
|
options.versionId = '';
|
||||||
|
options.isNull = true;
|
||||||
|
options.deleteData = true;
|
||||||
|
return callback(null, options);
|
||||||
|
}
|
||||||
|
// TODO check AWS behaviour
|
||||||
|
options.versionId = reqVersionId;
|
||||||
|
options.deleteData = true;
|
||||||
|
return callback(null, options);
|
||||||
|
}
|
||||||
|
// master is versioned and is not a null version
|
||||||
|
options.versionId = reqVersionId;
|
||||||
|
options.deleteData = true;
|
||||||
|
return callback(null, options);
|
||||||
|
},
|
||||||
|
|
||||||
|
preprocessingVersioningDelete(bucketName, bucketMD, objectName, objectMD,
|
||||||
|
reqVersionId, log, callback) {
|
||||||
|
const options = {};
|
||||||
|
// bucket is not versioning enabled
|
||||||
|
if (!bucketMD.getVersioningConfiguration()) {
|
||||||
|
options.deleteData = true;
|
||||||
|
return callback(null, options);
|
||||||
|
}
|
||||||
|
// bucket is versioning enabled
|
||||||
|
if (reqVersionId && reqVersionId !== 'null') {
|
||||||
|
// deleting a specific version
|
||||||
|
options.deleteData = true;
|
||||||
|
options.versionId = reqVersionId;
|
||||||
|
return callback(null, options);
|
||||||
|
}
|
||||||
|
if (reqVersionId) {
|
||||||
|
// deleting the 'null' version if it exists
|
||||||
|
if (objectMD.versionId === undefined) {
|
||||||
|
// object is not versioned, deleting it
|
||||||
|
options.deleteData = true;
|
||||||
|
return callback(null, options);
|
||||||
|
}
|
||||||
|
if (objectMD.isNull) {
|
||||||
|
// master is the null version
|
||||||
|
options.deleteData = true;
|
||||||
|
options.versionId = objectMD.versionId;
|
||||||
|
return callback(null, options);
|
||||||
|
}
|
||||||
|
if (objectMD.nullVersionId) {
|
||||||
|
// null version exists, deleting it
|
||||||
|
options.deleteData = true;
|
||||||
|
options.versionId = objectMD.nullVersionId;
|
||||||
|
return callback(null, options);
|
||||||
|
}
|
||||||
|
// null version does not exist, no deletion
|
||||||
|
// TODO check AWS behaviour for no deletion (seems having no error)
|
||||||
|
return callback(errors.NoSuchKey);
|
||||||
|
}
|
||||||
|
// not deleting any specific version, making a delete marker instead
|
||||||
|
return callback(null, options);
|
||||||
|
},
|
||||||
};
|
};
|
||||||
|
|
|
@ -315,7 +315,7 @@ utils.mapHeaders = function mapHeaders(headers, addHeaders) {
|
||||||
*/
|
*/
|
||||||
utils.isUnsupportedQuery = function isUnsupportedQuery(queryObj) {
|
utils.isUnsupportedQuery = function isUnsupportedQuery(queryObj) {
|
||||||
return Object.keys(queryObj)
|
return Object.keys(queryObj)
|
||||||
.some(key => (constants.unsupportedQueries.indexOf(key) > -1));
|
.some(key => constants.unsupportedQueries[key]);
|
||||||
};
|
};
|
||||||
|
|
||||||
export default utils;
|
export default utils;
|
||||||
|
|
|
@ -19,7 +19,7 @@
|
||||||
},
|
},
|
||||||
"homepage": "https://github.com/scality/S3#readme",
|
"homepage": "https://github.com/scality/S3#readme",
|
||||||
"dependencies": {
|
"dependencies": {
|
||||||
"arsenal": "scality/Arsenal",
|
"arsenal": "scality/Arsenal#ft/vsp",
|
||||||
"async": "~1.4.2",
|
"async": "~1.4.2",
|
||||||
"babel-core": "^6.5.2",
|
"babel-core": "^6.5.2",
|
||||||
"babel-plugin-transform-es2015-destructuring": "^6.5.2",
|
"babel-plugin-transform-es2015-destructuring": "^6.5.2",
|
||||||
|
|
|
@ -0,0 +1,42 @@
|
||||||
|
import async from 'async';
|
||||||
|
import { S3 } from 'aws-sdk';
|
||||||
|
|
||||||
|
import getConfig from '../../test/support/config';
|
||||||
|
const config = getConfig('default', { signatureVersion: 'v4' });
|
||||||
|
const s3 = new S3(config);
|
||||||
|
|
||||||
|
export const constants = {
|
||||||
|
versioningEnabled: { Status: 'Enabled' },
|
||||||
|
versioningSuspended: { Status: 'Suspended' },
|
||||||
|
};
|
||||||
|
|
||||||
|
function _deleteVersionList(versionList, bucket, callback) {
|
||||||
|
async.each(versionList, (versionInfo, cb) => {
|
||||||
|
const versionId = versionInfo.VersionId;
|
||||||
|
const params = { Bucket: bucket, Key: versionInfo.Key,
|
||||||
|
VersionId: versionId };
|
||||||
|
s3.deleteObject(params, cb);
|
||||||
|
}, callback);
|
||||||
|
}
|
||||||
|
|
||||||
|
export function removeAllVersions(params, callback) {
|
||||||
|
const bucket = params.Bucket;
|
||||||
|
async.waterfall([
|
||||||
|
cb => s3.listObjectVersions(params, cb),
|
||||||
|
(data, cb) => _deleteVersionList(data.DeleteMarkers, bucket,
|
||||||
|
err => cb(err, data)),
|
||||||
|
(data, cb) => _deleteVersionList(data.Versions, bucket,
|
||||||
|
err => cb(err, data)),
|
||||||
|
(data, cb) => {
|
||||||
|
if (data.IsTruncated) {
|
||||||
|
const params = {
|
||||||
|
Bucket: bucket,
|
||||||
|
KeyMarker: data.NextKeyMarker,
|
||||||
|
VersionIdMarker: data.NextVersionIdMarker,
|
||||||
|
};
|
||||||
|
return removeAllVersions(params, cb);
|
||||||
|
}
|
||||||
|
return cb();
|
||||||
|
},
|
||||||
|
], callback);
|
||||||
|
}
|
|
@ -8,7 +8,11 @@ import getConfig from '../support/config';
|
||||||
const bucket = `bigmpu-test-bucket-${Date.now()}`;
|
const bucket = `bigmpu-test-bucket-${Date.now()}`;
|
||||||
const key = 'mpuKey';
|
const key = 'mpuKey';
|
||||||
const body = 'abc';
|
const body = 'abc';
|
||||||
const eTag = '900150983cd24fb0d6963f7d28e17f72';
|
const partCount = 10000;
|
||||||
|
const eTag = require('crypto').createHash('md5').update(body).digest('hex');
|
||||||
|
const finalETag = require('crypto').createHash('md5')
|
||||||
|
.update(Buffer.from(eTag.repeat(partCount), 'hex').toString('binary'),
|
||||||
|
'binary').digest('hex');
|
||||||
|
|
||||||
function uploadPart(n, uploadId, s3, next) {
|
function uploadPart(n, uploadId, s3, next) {
|
||||||
const params = {
|
const params = {
|
||||||
|
@ -63,7 +67,7 @@ describe('large mpu', function tester() {
|
||||||
// will fail on AWS because parts too small
|
// will fail on AWS because parts too small
|
||||||
|
|
||||||
itSkipIfAWS('should intiate, put parts and complete mpu ' +
|
itSkipIfAWS('should intiate, put parts and complete mpu ' +
|
||||||
'with 10,000 parts', done => {
|
`with ${partCount} parts`, done => {
|
||||||
process.stdout.write('***Running large MPU test***\n');
|
process.stdout.write('***Running large MPU test***\n');
|
||||||
let uploadId;
|
let uploadId;
|
||||||
return waterfall([
|
return waterfall([
|
||||||
|
@ -78,14 +82,14 @@ describe('large mpu', function tester() {
|
||||||
}),
|
}),
|
||||||
next => {
|
next => {
|
||||||
process.stdout.write('putting parts');
|
process.stdout.write('putting parts');
|
||||||
return timesLimit(10000, 20, (n, cb) =>
|
return timesLimit(partCount, 20, (n, cb) =>
|
||||||
uploadPart(n, uploadId, s3, cb), err =>
|
uploadPart(n, uploadId, s3, cb), err =>
|
||||||
next(err)
|
next(err)
|
||||||
);
|
);
|
||||||
},
|
},
|
||||||
next => {
|
next => {
|
||||||
const parts = [];
|
const parts = [];
|
||||||
for (let i = 1; i <= 10000; i++) {
|
for (let i = 1; i <= partCount; i++) {
|
||||||
parts.push({
|
parts.push({
|
||||||
ETag: eTag,
|
ETag: eTag,
|
||||||
PartNumber: i,
|
PartNumber: i,
|
||||||
|
@ -114,8 +118,8 @@ describe('large mpu', function tester() {
|
||||||
if (err) {
|
if (err) {
|
||||||
return next(err);
|
return next(err);
|
||||||
}
|
}
|
||||||
assert.strictEqual(data.ETag, '"e0c3d6b4446bf8f97' +
|
assert.strictEqual(data.ETag,
|
||||||
'9c50df6d79e9e0a-10000"');
|
`"${finalETag}-${partCount}"`);
|
||||||
return next();
|
return next();
|
||||||
});
|
});
|
||||||
},
|
},
|
||||||
|
|
|
@ -1,7 +1,10 @@
|
||||||
import assert from 'assert';
|
import assert from 'assert';
|
||||||
|
|
||||||
import withV4 from '../support/withV4';
|
import withV4 from '../support/withV4';
|
||||||
import BucketUtility from '../../lib/utility/bucket-util';
|
import BucketUtility from '../../lib/utility/bucket-util';
|
||||||
|
import {
|
||||||
|
constants,
|
||||||
|
removeAllVersions,
|
||||||
|
} from '../../lib/utility/versioning-util.js';
|
||||||
|
|
||||||
const date = Date.now();
|
const date = Date.now();
|
||||||
const bucket = `completempu${date}`;
|
const bucket = `completempu${date}`;
|
||||||
|
@ -14,51 +17,14 @@ function checkNoError(err) {
|
||||||
`Expected success, got error ${JSON.stringify(err)}`);
|
`Expected success, got error ${JSON.stringify(err)}`);
|
||||||
}
|
}
|
||||||
|
|
||||||
|
const testing = process.env.VERSIONING === 'no' ? describe.skip : describe;
|
||||||
|
|
||||||
describe('Complete MPU', () => {
|
describe('Complete MPU', () => {
|
||||||
withV4(sigCfg => {
|
withV4(sigCfg => {
|
||||||
let bucketUtil;
|
const bucketUtil = new BucketUtility('default', sigCfg);
|
||||||
let s3;
|
const s3 = bucketUtil.s3;
|
||||||
let uploadId;
|
|
||||||
let firstEtag;
|
|
||||||
|
|
||||||
beforeEach(() => {
|
function _completeMpuAndCheckVid(uploadId, firstEtag, expectedVid, cb) {
|
||||||
bucketUtil = new BucketUtility('default', sigCfg);
|
|
||||||
s3 = bucketUtil.s3;
|
|
||||||
return s3.createBucketAsync({ Bucket: bucket })
|
|
||||||
.then(() => s3.createMultipartUploadAsync({
|
|
||||||
Bucket: bucket, Key: key }))
|
|
||||||
.then(res => {
|
|
||||||
uploadId = res.UploadId;
|
|
||||||
return s3.uploadPartAsync({ Bucket: bucket, Key: key,
|
|
||||||
PartNumber: 1, UploadId: uploadId, Body: bodyFirstPart });
|
|
||||||
})
|
|
||||||
.then(res => {
|
|
||||||
firstEtag = res.ETag;
|
|
||||||
return firstEtag;
|
|
||||||
})
|
|
||||||
.then(() => s3.uploadPartAsync({ Bucket: bucket, Key: key,
|
|
||||||
PartNumber: 2, UploadId: uploadId, Body: bodySecondPart }))
|
|
||||||
.catch(err => {
|
|
||||||
process.stdout.write(`Error in beforeEach: ${err}\n`);
|
|
||||||
throw err;
|
|
||||||
});
|
|
||||||
});
|
|
||||||
|
|
||||||
afterEach(() => {
|
|
||||||
process.stdout.write('Emptying bucket');
|
|
||||||
return bucketUtil.empty(bucket)
|
|
||||||
.then(() => {
|
|
||||||
process.stdout.write('Deleting bucket');
|
|
||||||
return bucketUtil.deleteOne(bucket);
|
|
||||||
})
|
|
||||||
.catch(err => {
|
|
||||||
process.stdout.write('Error in afterEach');
|
|
||||||
throw err;
|
|
||||||
});
|
|
||||||
});
|
|
||||||
|
|
||||||
it('should complete an MPU with fewer parts than were ' +
|
|
||||||
'originally put', done => {
|
|
||||||
s3.completeMultipartUpload({
|
s3.completeMultipartUpload({
|
||||||
Bucket: bucket,
|
Bucket: bucket,
|
||||||
Key: key,
|
Key: key,
|
||||||
|
@ -70,6 +36,12 @@ describe('Complete MPU', () => {
|
||||||
checkNoError(err);
|
checkNoError(err);
|
||||||
// to show that the mpu completed with just 1 part
|
// to show that the mpu completed with just 1 part
|
||||||
assert.strictEqual(data.ETag.slice(-3), '-1"');
|
assert.strictEqual(data.ETag.slice(-3), '-1"');
|
||||||
|
const versionId = data.VersionId;
|
||||||
|
if (expectedVid) {
|
||||||
|
assert.notEqual(versionId, undefined);
|
||||||
|
} else {
|
||||||
|
assert.strictEqual(versionId, expectedVid);
|
||||||
|
}
|
||||||
return s3.getObject({
|
return s3.getObject({
|
||||||
Bucket: bucket,
|
Bucket: bucket,
|
||||||
Key: key,
|
Key: key,
|
||||||
|
@ -78,9 +50,115 @@ describe('Complete MPU', () => {
|
||||||
checkNoError(err);
|
checkNoError(err);
|
||||||
// to show that data in completed key is just first part
|
// to show that data in completed key is just first part
|
||||||
assert.strictEqual(data.ContentLength, '10');
|
assert.strictEqual(data.ContentLength, '10');
|
||||||
done();
|
if (versionId) {
|
||||||
|
assert.strictEqual(data.VersionId, versionId);
|
||||||
|
}
|
||||||
|
cb();
|
||||||
});
|
});
|
||||||
});
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
afterEach(done => {
|
||||||
|
removeAllVersions({ Bucket: bucket }, err => {
|
||||||
|
if (err) {
|
||||||
|
return done(err);
|
||||||
|
}
|
||||||
|
return s3.deleteBucket({ Bucket: bucket }, done);
|
||||||
|
});
|
||||||
|
});
|
||||||
|
|
||||||
|
describe('on bucket without versioning configuration', () => {
|
||||||
|
let uploadId;
|
||||||
|
let firstEtag;
|
||||||
|
|
||||||
|
beforeEach(() => s3.createBucketAsync({ Bucket: bucket })
|
||||||
|
.then(() => s3.createMultipartUploadAsync({
|
||||||
|
Bucket: bucket, Key: key }))
|
||||||
|
.then(res => {
|
||||||
|
uploadId = res.UploadId;
|
||||||
|
return s3.uploadPartAsync({ Bucket: bucket, Key: key,
|
||||||
|
PartNumber: 1, UploadId: uploadId, Body: bodyFirstPart });
|
||||||
|
})
|
||||||
|
.then(res => {
|
||||||
|
firstEtag = res.ETag;
|
||||||
|
return firstEtag;
|
||||||
|
})
|
||||||
|
.then(() => s3.uploadPartAsync({ Bucket: bucket, Key: key,
|
||||||
|
PartNumber: 2, UploadId: uploadId, Body: bodySecondPart }))
|
||||||
|
.catch(err => {
|
||||||
|
process.stdout.write(`Error in beforeEach: ${err}\n`);
|
||||||
|
throw err;
|
||||||
|
})
|
||||||
|
);
|
||||||
|
|
||||||
|
it('should complete an MPU with fewer parts than were ' +
|
||||||
|
'originally put without returning a version id', done => {
|
||||||
|
_completeMpuAndCheckVid(uploadId, firstEtag, undefined, done);
|
||||||
|
});
|
||||||
|
});
|
||||||
|
|
||||||
|
testing('on bucket with enabled versioning', () => {
|
||||||
|
let uploadId;
|
||||||
|
let firstEtag;
|
||||||
|
|
||||||
|
beforeEach(() => s3.createBucketAsync({ Bucket: bucket })
|
||||||
|
.then(() => s3.putBucketVersioningAsync({ Bucket: bucket,
|
||||||
|
VersioningConfiguration: constants.versioningEnabled }))
|
||||||
|
.then(() => s3.createMultipartUploadAsync({
|
||||||
|
Bucket: bucket, Key: key }))
|
||||||
|
.then(res => {
|
||||||
|
uploadId = res.UploadId;
|
||||||
|
return s3.uploadPartAsync({ Bucket: bucket, Key: key,
|
||||||
|
PartNumber: 1, UploadId: uploadId, Body: bodyFirstPart });
|
||||||
|
})
|
||||||
|
.then(res => {
|
||||||
|
firstEtag = res.ETag;
|
||||||
|
return firstEtag;
|
||||||
|
})
|
||||||
|
.then(() => s3.uploadPartAsync({ Bucket: bucket, Key: key,
|
||||||
|
PartNumber: 2, UploadId: uploadId, Body: bodySecondPart }))
|
||||||
|
.catch(err => {
|
||||||
|
process.stdout.write(`Error in beforeEach: ${err}\n`);
|
||||||
|
throw err;
|
||||||
|
})
|
||||||
|
);
|
||||||
|
|
||||||
|
it('should complete an MPU with fewer parts than were ' +
|
||||||
|
'originally put and return a version id', done => {
|
||||||
|
_completeMpuAndCheckVid(uploadId, firstEtag, true, done);
|
||||||
|
});
|
||||||
|
});
|
||||||
|
|
||||||
|
testing('on bucket with suspended versioning', () => {
|
||||||
|
let uploadId;
|
||||||
|
let firstEtag;
|
||||||
|
|
||||||
|
beforeEach(() => s3.createBucketAsync({ Bucket: bucket })
|
||||||
|
.then(() => s3.putBucketVersioningAsync({ Bucket: bucket,
|
||||||
|
VersioningConfiguration: constants.versioningSuspended }))
|
||||||
|
.then(() => s3.createMultipartUploadAsync({
|
||||||
|
Bucket: bucket, Key: key }))
|
||||||
|
.then(res => {
|
||||||
|
uploadId = res.UploadId;
|
||||||
|
return s3.uploadPartAsync({ Bucket: bucket, Key: key,
|
||||||
|
PartNumber: 1, UploadId: uploadId, Body: bodyFirstPart });
|
||||||
|
})
|
||||||
|
.then(res => {
|
||||||
|
firstEtag = res.ETag;
|
||||||
|
return firstEtag;
|
||||||
|
})
|
||||||
|
.then(() => s3.uploadPartAsync({ Bucket: bucket, Key: key,
|
||||||
|
PartNumber: 2, UploadId: uploadId, Body: bodySecondPart }))
|
||||||
|
.catch(err => {
|
||||||
|
process.stdout.write(`Error in beforeEach: ${err}\n`);
|
||||||
|
throw err;
|
||||||
|
})
|
||||||
|
);
|
||||||
|
|
||||||
|
it('should complete an MPU with fewer parts than were ' +
|
||||||
|
'originally put and should not return a version id', done => {
|
||||||
|
_completeMpuAndCheckVid(uploadId, firstEtag, undefined, done);
|
||||||
|
});
|
||||||
});
|
});
|
||||||
});
|
});
|
||||||
});
|
});
|
||||||
|
|
|
@ -6,6 +6,7 @@ import getConfig from '../support/config';
|
||||||
import methodRequest from '../../lib/utility/cors-util';
|
import methodRequest from '../../lib/utility/cors-util';
|
||||||
import { generateCorsParams } from '../../lib/utility/cors-util';
|
import { generateCorsParams } from '../../lib/utility/cors-util';
|
||||||
import { WebsiteConfigTester } from '../../lib/utility/website-util';
|
import { WebsiteConfigTester } from '../../lib/utility/website-util';
|
||||||
|
import { removeAllVersions } from '../../lib/utility/versioning-util';
|
||||||
|
|
||||||
const config = getConfig('default', { signatureVersion: 'v4' });
|
const config = getConfig('default', { signatureVersion: 'v4' });
|
||||||
const s3 = new S3(config);
|
const s3 = new S3(config);
|
||||||
|
@ -450,7 +451,7 @@ describe('Cross Origin Resource Sharing requests', () => {
|
||||||
beforeEach(done => s3.putBucketCors(corsParams, done));
|
beforeEach(done => s3.putBucketCors(corsParams, done));
|
||||||
|
|
||||||
afterEach(done => {
|
afterEach(done => {
|
||||||
s3.deleteObject({ Bucket: bucket, Key: objectKey }, err => {
|
removeAllVersions({ Bucket: bucket }, err => {
|
||||||
if (err && err.code !== 'NoSuchKey' &&
|
if (err && err.code !== 'NoSuchKey' &&
|
||||||
err.code !== 'NoSuchBucket') {
|
err.code !== 'NoSuchBucket') {
|
||||||
process.stdout.write(`Unexpected err in afterEach: ${err}`);
|
process.stdout.write(`Unexpected err in afterEach: ${err}`);
|
||||||
|
|
|
@ -3,7 +3,7 @@ import assert from 'assert';
|
||||||
import withV4 from '../support/withV4';
|
import withV4 from '../support/withV4';
|
||||||
import BucketUtility from '../../lib/utility/bucket-util';
|
import BucketUtility from '../../lib/utility/bucket-util';
|
||||||
|
|
||||||
const bucket = 'object-test-mpu';
|
const bucket = `object-test-mpu-${Date.now()}`;
|
||||||
const objectKey = 'toAbort&<>"\'';
|
const objectKey = 'toAbort&<>"\'';
|
||||||
|
|
||||||
// Get the expected object of listMPU API.
|
// Get the expected object of listMPU API.
|
||||||
|
|
|
@ -0,0 +1,399 @@
|
||||||
|
import assert from 'assert';
|
||||||
|
import { S3 } from 'aws-sdk';
|
||||||
|
import async from 'async';
|
||||||
|
|
||||||
|
import getConfig from '../support/config';
|
||||||
|
|
||||||
|
const bucket = `versioning-bucket-${Date.now()}`;
|
||||||
|
|
||||||
|
const testing = process.env.VERSIONING === 'no' ?
|
||||||
|
describe.skip : describe;
|
||||||
|
|
||||||
|
testing('listObject - Delimiter version', function testSuite() {
|
||||||
|
this.timeout(600000);
|
||||||
|
let s3 = undefined;
|
||||||
|
|
||||||
|
function _deleteVersionList(versionList, bucket, callback) {
|
||||||
|
async.each(versionList, (versionInfo, cb) => {
|
||||||
|
const versionId = versionInfo.VersionId;
|
||||||
|
const params = { Bucket: bucket, Key: versionInfo.Key,
|
||||||
|
VersionId: versionId };
|
||||||
|
s3.deleteObject(params, cb);
|
||||||
|
}, callback);
|
||||||
|
}
|
||||||
|
function _removeAllVersions(bucket, callback) {
|
||||||
|
return s3.listObjectVersions({ Bucket: bucket }, (err, data) => {
|
||||||
|
if (err && err.NoSuchBucket) {
|
||||||
|
return callback();
|
||||||
|
} else if (err) {
|
||||||
|
return callback(err);
|
||||||
|
}
|
||||||
|
return _deleteVersionList(data.DeleteMarkers, bucket, err => {
|
||||||
|
if (err) {
|
||||||
|
return callback(err);
|
||||||
|
}
|
||||||
|
return _deleteVersionList(data.Versions, bucket, callback);
|
||||||
|
});
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
// setup test
|
||||||
|
before(done => {
|
||||||
|
const config = getConfig('default', { signatureVersion: 'v4' });
|
||||||
|
s3 = new S3(config);
|
||||||
|
s3.createBucket({ Bucket: bucket }, done);
|
||||||
|
});
|
||||||
|
|
||||||
|
// delete bucket after testing
|
||||||
|
after(done => {
|
||||||
|
_removeAllVersions(bucket, err => {
|
||||||
|
if (err) {
|
||||||
|
return done(err);
|
||||||
|
}
|
||||||
|
return s3.deleteBucket({ Bucket: bucket }, err => {
|
||||||
|
assert.strictEqual(err, null,
|
||||||
|
`Error deleting bucket: ${err}`);
|
||||||
|
return done();
|
||||||
|
});
|
||||||
|
});
|
||||||
|
});
|
||||||
|
|
||||||
|
let versioning = false;
|
||||||
|
|
||||||
|
const objects = [
|
||||||
|
{ name: 'notes/summer/august/1.txt', value: 'foo', isNull: true },
|
||||||
|
{ name: 'notes/year.txt', value: 'foo', isNull: true },
|
||||||
|
{ name: 'notes/yore.rs', value: 'foo', isNull: true },
|
||||||
|
{ name: 'notes/zaphod/Beeblebrox.txt', value: 'foo', isNull: true },
|
||||||
|
{ name: 'Pâtisserie=中文-español-English', value: 'foo' },
|
||||||
|
{ name: 'Pâtisserie=中文-español-English', value: 'bar' },
|
||||||
|
{ name: 'notes/spring/1.txt', value: 'qux' },
|
||||||
|
{ name: 'notes/spring/1.txt', value: 'foo' },
|
||||||
|
{ name: 'notes/spring/1.txt', value: 'bar' },
|
||||||
|
{ name: 'notes/spring/2.txt', value: 'foo' },
|
||||||
|
{ name: 'notes/spring/2.txt', value: null },
|
||||||
|
{ name: 'notes/spring/march/1.txt', value: 'foo' },
|
||||||
|
{ name: 'notes/spring/march/1.txt', value: 'bar', isNull: true },
|
||||||
|
{ name: 'notes/summer/1.txt', value: 'foo' },
|
||||||
|
{ name: 'notes/summer/1.txt', value: 'bar' },
|
||||||
|
{ name: 'notes/summer/2.txt', value: 'bar' },
|
||||||
|
{ name: 'notes/summer/4.txt', value: null },
|
||||||
|
{ name: 'notes/summer/4.txt', value: null },
|
||||||
|
{ name: 'notes/summer/4.txt', value: null },
|
||||||
|
{ name: 'notes/summer/444.txt', value: null },
|
||||||
|
{ name: 'notes/summer/44444.txt', value: null },
|
||||||
|
];
|
||||||
|
|
||||||
|
it('put objects inside bucket', done => {
|
||||||
|
async.eachSeries(objects, (obj, next) => {
|
||||||
|
async.waterfall([
|
||||||
|
next => {
|
||||||
|
if (!versioning && obj.isNull !== true) {
|
||||||
|
const params = {
|
||||||
|
Bucket: bucket,
|
||||||
|
VersioningConfiguration: {
|
||||||
|
Status: 'Enabled',
|
||||||
|
},
|
||||||
|
};
|
||||||
|
versioning = true;
|
||||||
|
return s3.putBucketVersioning(params, err => next(err));
|
||||||
|
} else if (versioning && obj.isNull === true) {
|
||||||
|
const params = {
|
||||||
|
Bucket: bucket,
|
||||||
|
VersioningConfiguration: {
|
||||||
|
Status: 'Suspended',
|
||||||
|
},
|
||||||
|
};
|
||||||
|
versioning = false;
|
||||||
|
return s3.putBucketVersioning(params, err => next(err));
|
||||||
|
}
|
||||||
|
return next();
|
||||||
|
},
|
||||||
|
next => {
|
||||||
|
if (obj.value === null) {
|
||||||
|
return s3.deleteObject({
|
||||||
|
Bucket: bucket,
|
||||||
|
Key: obj.name,
|
||||||
|
}, function test(err) {
|
||||||
|
const headers = this.httpResponse.headers;
|
||||||
|
assert.strictEqual(headers['x-amz-delete-marker'],
|
||||||
|
'true');
|
||||||
|
// eslint-disable-next-line no-param-reassign
|
||||||
|
obj.versionId = headers['x-amz-version-id'];
|
||||||
|
return next(err);
|
||||||
|
});
|
||||||
|
}
|
||||||
|
return s3.putObject({
|
||||||
|
Bucket: bucket,
|
||||||
|
Key: obj.name,
|
||||||
|
Body: obj.value,
|
||||||
|
}, (err, res) => {
|
||||||
|
if (err) {
|
||||||
|
return next(err);
|
||||||
|
}
|
||||||
|
// eslint-disable-next-line no-param-reassign
|
||||||
|
obj.versionId = res.VersionId || 'null';
|
||||||
|
return next();
|
||||||
|
});
|
||||||
|
},
|
||||||
|
], err => next(err));
|
||||||
|
}, err => done(err));
|
||||||
|
});
|
||||||
|
|
||||||
|
[
|
||||||
|
{
|
||||||
|
name: 'basic listing',
|
||||||
|
params: {},
|
||||||
|
expectedResult: objects,
|
||||||
|
commonPrefix: [],
|
||||||
|
isTruncated: false,
|
||||||
|
nextKeyMarker: undefined,
|
||||||
|
nextVersionIdMarker: undefined,
|
||||||
|
},
|
||||||
|
{
|
||||||
|
name: 'with valid key marker',
|
||||||
|
params: { KeyMarker: 'notes/spring/1.txt' },
|
||||||
|
expectedResult: [
|
||||||
|
objects[0],
|
||||||
|
objects[1],
|
||||||
|
objects[2],
|
||||||
|
objects[3],
|
||||||
|
objects[9],
|
||||||
|
objects[10],
|
||||||
|
objects[11],
|
||||||
|
objects[12],
|
||||||
|
objects[13],
|
||||||
|
objects[14],
|
||||||
|
objects[15],
|
||||||
|
objects[16],
|
||||||
|
objects[17],
|
||||||
|
objects[18],
|
||||||
|
objects[19],
|
||||||
|
objects[20],
|
||||||
|
],
|
||||||
|
commonPrefix: [],
|
||||||
|
isTruncated: false,
|
||||||
|
nextKeyMarker: undefined,
|
||||||
|
nextVersionIdMarker: undefined,
|
||||||
|
},
|
||||||
|
{
|
||||||
|
name: 'with bad key marker',
|
||||||
|
params: { KeyMarker: 'zzzz', Delimiter: '/' },
|
||||||
|
expectedResult: [],
|
||||||
|
commonPrefix: [],
|
||||||
|
isTruncated: false,
|
||||||
|
nextKeyMarker: undefined,
|
||||||
|
nextVersionIdMarker: undefined,
|
||||||
|
},
|
||||||
|
{
|
||||||
|
name: 'with maxKeys',
|
||||||
|
params: { MaxKeys: 3 },
|
||||||
|
expectedResult: [
|
||||||
|
objects[4],
|
||||||
|
objects[5],
|
||||||
|
objects[8],
|
||||||
|
],
|
||||||
|
commonPrefix: [],
|
||||||
|
isTruncated: true,
|
||||||
|
nextKeyMarker: objects[8].name,
|
||||||
|
nextVersionIdMarker: objects[8],
|
||||||
|
},
|
||||||
|
{
|
||||||
|
name: 'with big maxKeys',
|
||||||
|
params: { MaxKeys: 15000 },
|
||||||
|
expectedResult: objects,
|
||||||
|
commonPrefix: [],
|
||||||
|
isTruncated: false,
|
||||||
|
nextKeyMarker: undefined,
|
||||||
|
nextVersionIdMarker: undefined,
|
||||||
|
},
|
||||||
|
{
|
||||||
|
name: 'with delimiter',
|
||||||
|
params: { Delimiter: '/' },
|
||||||
|
expectedResult: objects.slice(4, 6),
|
||||||
|
commonPrefix: ['notes/'],
|
||||||
|
isTruncated: false,
|
||||||
|
nextKeyMarker: undefined,
|
||||||
|
nextVersionIdMarker: undefined,
|
||||||
|
},
|
||||||
|
{
|
||||||
|
name: 'with long delimiter',
|
||||||
|
params: { Delimiter: 'notes/summer' },
|
||||||
|
expectedResult: objects.filter(obj =>
|
||||||
|
obj.name.indexOf('notes/summer') < 0),
|
||||||
|
commonPrefix: ['notes/summer'],
|
||||||
|
isTruncated: false,
|
||||||
|
nextKeyMarker: undefined,
|
||||||
|
nextVersionIdMarker: undefined,
|
||||||
|
},
|
||||||
|
{
|
||||||
|
name: 'bad key marker and good prefix',
|
||||||
|
params: {
|
||||||
|
Delimiter: '/',
|
||||||
|
Prefix: 'notes/summer/',
|
||||||
|
KeyMarker: 'notes/summer0',
|
||||||
|
},
|
||||||
|
expectedResult: [],
|
||||||
|
commonPrefix: [],
|
||||||
|
isTruncated: false,
|
||||||
|
nextKeyMarker: undefined,
|
||||||
|
nextVersionIdMarker: undefined,
|
||||||
|
},
|
||||||
|
{
|
||||||
|
name: 'delimiter and prefix (related to #147)',
|
||||||
|
params: { Delimiter: '/', Prefix: 'notes/' },
|
||||||
|
expectedResult: [
|
||||||
|
objects[1],
|
||||||
|
objects[2],
|
||||||
|
],
|
||||||
|
commonPrefix: [
|
||||||
|
'notes/spring/',
|
||||||
|
'notes/summer/',
|
||||||
|
'notes/zaphod/',
|
||||||
|
],
|
||||||
|
isTruncated: false,
|
||||||
|
nextKeyMarker: undefined,
|
||||||
|
nextVersionIdMarker: undefined,
|
||||||
|
},
|
||||||
|
{
|
||||||
|
name: 'delimiter, prefix and marker (related to #147)',
|
||||||
|
params: {
|
||||||
|
Delimiter: '/',
|
||||||
|
Prefix: 'notes/',
|
||||||
|
KeyMarker: 'notes/year.txt',
|
||||||
|
},
|
||||||
|
expectedResult: [objects[2]],
|
||||||
|
commonPrefix: ['notes/zaphod/'],
|
||||||
|
isTruncated: false,
|
||||||
|
nextKeyMarker: undefined,
|
||||||
|
nextVersionIdMarker: undefined,
|
||||||
|
},
|
||||||
|
{
|
||||||
|
name: 'all parameters 1/5',
|
||||||
|
params: {
|
||||||
|
Delimiter: '/',
|
||||||
|
Prefix: 'notes/',
|
||||||
|
KeyMarker: 'notes/',
|
||||||
|
MaxKeys: 1,
|
||||||
|
},
|
||||||
|
expectedResult: [],
|
||||||
|
commonPrefix: ['notes/spring/'],
|
||||||
|
isTruncated: true,
|
||||||
|
nextKeyMarker: 'notes/spring/',
|
||||||
|
nextVersionIdMarker: undefined,
|
||||||
|
},
|
||||||
|
{
|
||||||
|
name: 'all parameters 2/5',
|
||||||
|
params: {
|
||||||
|
Delimiter: '/',
|
||||||
|
Prefix: 'notes/',
|
||||||
|
KeyMarker: 'notes/spring/',
|
||||||
|
MaxKeys: 1,
|
||||||
|
},
|
||||||
|
expectedResult: [],
|
||||||
|
commonPrefix: ['notes/summer/'],
|
||||||
|
isTruncated: true,
|
||||||
|
nextKeyMarker: 'notes/summer/',
|
||||||
|
nextVersionIdMarker: undefined,
|
||||||
|
},
|
||||||
|
{
|
||||||
|
name: 'all parameters 3/5',
|
||||||
|
params: {
|
||||||
|
Delimiter: '/',
|
||||||
|
Prefix: 'notes/',
|
||||||
|
KeyMarker: 'notes/summer/',
|
||||||
|
MaxKeys: 1,
|
||||||
|
},
|
||||||
|
expectedResult: [objects[1]],
|
||||||
|
commonPrefix: [],
|
||||||
|
isTruncated: true,
|
||||||
|
nextKeyMarker: objects[1].name,
|
||||||
|
nextVersionIdMarker: objects[1],
|
||||||
|
},
|
||||||
|
{
|
||||||
|
name: 'all parameters 4/5',
|
||||||
|
params: {
|
||||||
|
Delimiter: '/',
|
||||||
|
Prefix: 'notes/',
|
||||||
|
KeyMarker: 'notes/year.txt',
|
||||||
|
MaxKeys: 1,
|
||||||
|
},
|
||||||
|
expectedResult: [objects[2]],
|
||||||
|
commonPrefix: [],
|
||||||
|
isTruncated: true,
|
||||||
|
nextKeyMarker: objects[2].name,
|
||||||
|
nextVersionIdMarker: objects[2],
|
||||||
|
},
|
||||||
|
{
|
||||||
|
name: 'all parameters 5/5',
|
||||||
|
params: {
|
||||||
|
Delimiter: '/',
|
||||||
|
Prefix: 'notes/',
|
||||||
|
KeyMarker: 'notes/yore.rs',
|
||||||
|
MaxKeys: 1,
|
||||||
|
},
|
||||||
|
expectedResult: [],
|
||||||
|
commonPrefix: ['notes/zaphod/'],
|
||||||
|
isTruncated: false,
|
||||||
|
nextKeyMarker: undefined,
|
||||||
|
nextVersionIdMarker: undefined,
|
||||||
|
},
|
||||||
|
].forEach(test => {
|
||||||
|
it(test.name, done => {
|
||||||
|
const expectedResult = test.expectedResult;
|
||||||
|
s3.listObjectVersions(
|
||||||
|
Object.assign({ Bucket: bucket }, test.params),
|
||||||
|
(err, res) => {
|
||||||
|
if (err) {
|
||||||
|
return done(err);
|
||||||
|
}
|
||||||
|
res.Versions.forEach(result => {
|
||||||
|
const item = expectedResult.find(obj => {
|
||||||
|
if (obj.name === result.Key &&
|
||||||
|
obj.versionId === result.VersionId &&
|
||||||
|
obj.value !== null) {
|
||||||
|
return true;
|
||||||
|
}
|
||||||
|
return false;
|
||||||
|
});
|
||||||
|
if (!item) {
|
||||||
|
throw new Error(
|
||||||
|
`listing fail, unexpected key ${result.Key} ` +
|
||||||
|
`with version ${result.VersionId}`);
|
||||||
|
}
|
||||||
|
});
|
||||||
|
res.DeleteMarkers.forEach(result => {
|
||||||
|
const item = expectedResult.find(obj => {
|
||||||
|
if (obj.name === result.Key &&
|
||||||
|
obj.versionId === result.VersionId &&
|
||||||
|
obj.value === null) {
|
||||||
|
return true;
|
||||||
|
}
|
||||||
|
return false;
|
||||||
|
});
|
||||||
|
if (!item) {
|
||||||
|
throw new Error(
|
||||||
|
`listing fail, unexpected key ${result.Key} ` +
|
||||||
|
`with version ${result.VersionId}`);
|
||||||
|
}
|
||||||
|
});
|
||||||
|
res.CommonPrefixes.forEach(cp => {
|
||||||
|
if (!test.commonPrefix.find(
|
||||||
|
item => item === cp.Prefix)) {
|
||||||
|
throw new Error(
|
||||||
|
`listing fail, unexpected prefix ${cp.Prefix}`);
|
||||||
|
}
|
||||||
|
});
|
||||||
|
assert.strictEqual(res.IsTruncated, test.isTruncated);
|
||||||
|
assert.strictEqual(res.NextKeyMarker, test.nextKeyMarker);
|
||||||
|
if (!test.nextVersionIdMarker) {
|
||||||
|
// eslint-disable-next-line no-param-reassign
|
||||||
|
test.nextVersionIdMarker = {};
|
||||||
|
}
|
||||||
|
assert.strictEqual(res.NextVersionIdMarker,
|
||||||
|
test.nextVersionIdMarker.versionId);
|
||||||
|
return done();
|
||||||
|
});
|
||||||
|
});
|
||||||
|
});
|
||||||
|
});
|
|
@ -0,0 +1,137 @@
|
||||||
|
import assert from 'assert';
|
||||||
|
import async from 'async';
|
||||||
|
import BucketUtility from '../../lib/utility/bucket-util';
|
||||||
|
|
||||||
|
const bucketName = `multi-object-delete-${Date.now()}`;
|
||||||
|
const key = 'key';
|
||||||
|
|
||||||
|
function checkNoError(err) {
|
||||||
|
assert.equal(err, null,
|
||||||
|
`Expected success, got error ${JSON.stringify(err)}`);
|
||||||
|
}
|
||||||
|
|
||||||
|
function sortList(list) {
|
||||||
|
return list.sort((a, b) => {
|
||||||
|
if (a.Key > b.Key) {
|
||||||
|
return 1;
|
||||||
|
}
|
||||||
|
if (a.Key < b.Key) {
|
||||||
|
return -1;
|
||||||
|
}
|
||||||
|
return 0;
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
const testing = process.env.VERSIONING === 'no' ? describe.skip : describe;
|
||||||
|
|
||||||
|
testing('Multi-Object Versioning Delete Success', function success() {
|
||||||
|
this.timeout(360000);
|
||||||
|
let bucketUtil;
|
||||||
|
let s3;
|
||||||
|
let objectsRes;
|
||||||
|
|
||||||
|
beforeEach(done => {
|
||||||
|
bucketUtil = new BucketUtility('default', {
|
||||||
|
signatureVersion: 'v4',
|
||||||
|
});
|
||||||
|
s3 = bucketUtil.s3;
|
||||||
|
async.waterfall([
|
||||||
|
next => s3.createBucket({ Bucket: bucketName }, err => next(err)),
|
||||||
|
next => s3.putBucketVersioning({
|
||||||
|
Bucket: bucketName,
|
||||||
|
VersioningConfiguration: {
|
||||||
|
Status: 'Enabled',
|
||||||
|
},
|
||||||
|
}, err => next(err)),
|
||||||
|
next => {
|
||||||
|
const objects = [];
|
||||||
|
for (let i = 1; i < 1001; i ++) {
|
||||||
|
objects.push(`${key}${i}`);
|
||||||
|
}
|
||||||
|
async.mapLimit(objects, 20, (key, next) => {
|
||||||
|
s3.putObject({
|
||||||
|
Bucket: bucketName,
|
||||||
|
Key: key,
|
||||||
|
Body: 'somebody',
|
||||||
|
}, (err, res) => {
|
||||||
|
if (err) {
|
||||||
|
return next(err);
|
||||||
|
}
|
||||||
|
// eslint-disable-next-line no-param-reassign
|
||||||
|
res.Key = key;
|
||||||
|
return next(null, res);
|
||||||
|
});
|
||||||
|
}, (err, results) => {
|
||||||
|
if (err) {
|
||||||
|
return next(err);
|
||||||
|
}
|
||||||
|
objectsRes = results;
|
||||||
|
return next();
|
||||||
|
});
|
||||||
|
},
|
||||||
|
], err => done(err));
|
||||||
|
});
|
||||||
|
|
||||||
|
afterEach(() => s3.deleteBucketAsync({ Bucket: bucketName }));
|
||||||
|
|
||||||
|
it('should batch delete 1000 objects quietly', () => {
|
||||||
|
const objects = objectsRes.slice(0, 1000).map(obj =>
|
||||||
|
({ Key: obj.Key, VersionId: obj.VersionId }));
|
||||||
|
return s3.deleteObjectsAsync({
|
||||||
|
Bucket: bucketName,
|
||||||
|
Delete: {
|
||||||
|
Objects: objects,
|
||||||
|
Quiet: true,
|
||||||
|
},
|
||||||
|
}).then(res => {
|
||||||
|
assert.strictEqual(res.Deleted.length, 0);
|
||||||
|
assert.strictEqual(res.Errors.length, 0);
|
||||||
|
}).catch(err => {
|
||||||
|
checkNoError(err);
|
||||||
|
});
|
||||||
|
});
|
||||||
|
|
||||||
|
it('should batch delete 1000 objects', () => {
|
||||||
|
const objects = objectsRes.slice(0, 1000).map(obj =>
|
||||||
|
({ Key: obj.Key, VersionId: obj.VersionId }));
|
||||||
|
return s3.deleteObjectsAsync({
|
||||||
|
Bucket: bucketName,
|
||||||
|
Delete: {
|
||||||
|
Objects: objects,
|
||||||
|
Quiet: false,
|
||||||
|
},
|
||||||
|
}).then(res => {
|
||||||
|
assert.strictEqual(res.Deleted.length, 1000);
|
||||||
|
// order of returned objects not sorted
|
||||||
|
assert.deepStrictEqual(sortList(res.Deleted), sortList(objects));
|
||||||
|
assert.strictEqual(res.Errors.length, 0);
|
||||||
|
}).catch(err => {
|
||||||
|
checkNoError(err);
|
||||||
|
});
|
||||||
|
});
|
||||||
|
|
||||||
|
it('should not send back error if one versionId is invalid', () => {
|
||||||
|
const objects = objectsRes.slice(0, 1000).map(obj =>
|
||||||
|
({ Key: obj.Key, VersionId: obj.VersionId }));
|
||||||
|
const prevVersion = objects[0].VersionId;
|
||||||
|
objects[0].VersionId = 'invalid-version-id';
|
||||||
|
return s3.deleteObjectsAsync({
|
||||||
|
Bucket: bucketName,
|
||||||
|
Delete: {
|
||||||
|
Objects: objects,
|
||||||
|
},
|
||||||
|
}).then(res =>
|
||||||
|
s3.deleteObjectAsync({
|
||||||
|
Bucket: bucketName,
|
||||||
|
Key: objects[0].Key,
|
||||||
|
VersionId: prevVersion,
|
||||||
|
}).then(() => {
|
||||||
|
assert.strictEqual(res.Deleted.length, 999);
|
||||||
|
assert.strictEqual(res.Errors.length, 1);
|
||||||
|
assert.strictEqual(res.Errors[0].Code, 'NoSuchVersion');
|
||||||
|
})
|
||||||
|
).catch(err => {
|
||||||
|
checkNoError(err);
|
||||||
|
});
|
||||||
|
});
|
||||||
|
});
|
|
@ -0,0 +1,270 @@
|
||||||
|
import assert from 'assert';
|
||||||
|
import { S3 } from 'aws-sdk';
|
||||||
|
import async from 'async';
|
||||||
|
import { versioning } from 'arsenal';
|
||||||
|
import {
|
||||||
|
removeAllVersions,
|
||||||
|
constants,
|
||||||
|
} from '../../lib/utility/versioning-util.js';
|
||||||
|
|
||||||
|
import getConfig from '../support/config';
|
||||||
|
|
||||||
|
const config = getConfig('default', { signatureVersion: 'v4' });
|
||||||
|
const s3 = new S3(config);
|
||||||
|
const counter = 100;
|
||||||
|
let bucket;
|
||||||
|
const key = '/';
|
||||||
|
const fakeId = 'fakeId';
|
||||||
|
const VID_INF = versioning.VersionID.VID_INF;
|
||||||
|
const nonExistingId = versioning.VersionID
|
||||||
|
.encrypt(`${VID_INF.slice(VID_INF.length - 1)}7`);
|
||||||
|
|
||||||
|
function _assertNoError(err, desc) {
|
||||||
|
assert.strictEqual(err, null, `Unexpected err ${desc}: ${err}`);
|
||||||
|
}
|
||||||
|
|
||||||
|
// need a wrapper because sdk apparently does not include version id in
|
||||||
|
// exposed data object for put/get acl methods
|
||||||
|
function _wrapDataObject(method, params, callback) {
|
||||||
|
const request = s3[method](params, (err, data) => {
|
||||||
|
const responseHeaders = request.response.httpResponse.headers;
|
||||||
|
if (err) {
|
||||||
|
return callback(err);
|
||||||
|
}
|
||||||
|
const dataObj = Object.assign({
|
||||||
|
VersionId: responseHeaders['x-amz-version-id'],
|
||||||
|
}, data);
|
||||||
|
return callback(null, dataObj);
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
function _getObjectAcl(params, callback) {
|
||||||
|
_wrapDataObject('getObjectAcl', params, callback);
|
||||||
|
}
|
||||||
|
|
||||||
|
function _putObjectAcl(params, callback) {
|
||||||
|
_wrapDataObject('putObjectAcl', params, callback);
|
||||||
|
}
|
||||||
|
|
||||||
|
function _putAndGetAcl(cannedAcl, versionId, putResVerId, getResVerId, cb) {
|
||||||
|
const params = {
|
||||||
|
Bucket: bucket,
|
||||||
|
Key: key,
|
||||||
|
ACL: cannedAcl,
|
||||||
|
VersionId: versionId,
|
||||||
|
};
|
||||||
|
_putObjectAcl(params, (err, data) => {
|
||||||
|
_assertNoError(err, `putting object acl with version id: ${versionId}`);
|
||||||
|
assert.strictEqual(data.VersionId, putResVerId,
|
||||||
|
`expected version id '${putResVerId}' in putacl res headers, ` +
|
||||||
|
`got '${data.VersionId}' instead`);
|
||||||
|
delete params.ACL;
|
||||||
|
_getObjectAcl(params, (err, data) => {
|
||||||
|
_assertNoError(err,
|
||||||
|
`getting object acl with version id: ${versionId}`);
|
||||||
|
assert.strictEqual(data.VersionId, getResVerId,
|
||||||
|
`expected version id '${getResVerId}' in getacl res headers, ` +
|
||||||
|
`got '${data.VersionId}' instead`);
|
||||||
|
assert.strictEqual(data.Grants.length, 2);
|
||||||
|
cb();
|
||||||
|
});
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
function _testBehaviorVersioningEnabledOrSuspended(versionIds) {
|
||||||
|
it('non-version specific put and get ACL should target latest ' +
|
||||||
|
'version AND return version ID in response headers', done => {
|
||||||
|
const latestVersion = versionIds[versionIds.length - 1];
|
||||||
|
_putAndGetAcl('public-read', undefined, latestVersion,
|
||||||
|
latestVersion, done);
|
||||||
|
});
|
||||||
|
|
||||||
|
it('version specific put and get ACL should return version ID ' +
|
||||||
|
'in response headers', done => {
|
||||||
|
const firstVersion = versionIds[0];
|
||||||
|
_putAndGetAcl('public-read', firstVersion, firstVersion,
|
||||||
|
firstVersion, done);
|
||||||
|
});
|
||||||
|
|
||||||
|
it('version specific put and get ACL (version id = "null") ' +
|
||||||
|
'should return version ID ("null") in response headers', done => {
|
||||||
|
_putAndGetAcl('public-read', 'null', 'null', 'null', done);
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
const testing = process.env.VERSIONING === 'no' ? describe.skip : describe.skip;
|
||||||
|
|
||||||
|
testing('put and get object acl with versioning', function testSuite() {
|
||||||
|
this.timeout(600000);
|
||||||
|
|
||||||
|
beforeEach(done => {
|
||||||
|
bucket = `versioning-bucket-acl-${Date.now()}`;
|
||||||
|
s3.createBucket({ Bucket: bucket }, done);
|
||||||
|
});
|
||||||
|
|
||||||
|
afterEach(done => {
|
||||||
|
removeAllVersions({ Bucket: bucket }, err => {
|
||||||
|
if (err) {
|
||||||
|
return done(err);
|
||||||
|
}
|
||||||
|
return s3.deleteBucket({ Bucket: bucket }, done);
|
||||||
|
});
|
||||||
|
});
|
||||||
|
|
||||||
|
describe('in a bucket without versioning configuration', () => {
|
||||||
|
beforeEach(done => {
|
||||||
|
s3.putObject({ Bucket: bucket, Key: key }, done);
|
||||||
|
});
|
||||||
|
|
||||||
|
it('should not return version id for non-version specific ' +
|
||||||
|
'put and get ACL', done => {
|
||||||
|
_putAndGetAcl('public-read', undefined, undefined, undefined, done);
|
||||||
|
});
|
||||||
|
|
||||||
|
it('should not return version id for version specific ' +
|
||||||
|
'put and get ACL (version id = "null")', done => {
|
||||||
|
_putAndGetAcl('public-read', 'null', undefined, undefined, done);
|
||||||
|
});
|
||||||
|
|
||||||
|
it('should return NoSuchVersion if attempting to put acl for ' +
|
||||||
|
'non-existing version', done => {
|
||||||
|
const params = { Bucket: bucket, Key: key, VersionId: nonExistingId,
|
||||||
|
ACL: 'private' };
|
||||||
|
s3.putObjectAcl(params, err => {
|
||||||
|
assert(err, 'Expected err but did not find one');
|
||||||
|
assert.strictEqual(err.code, 'NoSuchVersion');
|
||||||
|
assert.strictEqual(err.statusCode, 404);
|
||||||
|
done();
|
||||||
|
});
|
||||||
|
});
|
||||||
|
|
||||||
|
it('should return InvalidArgument if attempting to put acl for ' +
|
||||||
|
'invalid id', done => {
|
||||||
|
const params = { Bucket: bucket, Key: key, VersionId: fakeId,
|
||||||
|
ACL: 'private' };
|
||||||
|
s3.putObjectAcl(params, err => {
|
||||||
|
assert(err, 'Expected err but did not find one');
|
||||||
|
assert.strictEqual(err.code, 'InvalidArgument');
|
||||||
|
assert.strictEqual(err.statusCode, 400);
|
||||||
|
done();
|
||||||
|
});
|
||||||
|
});
|
||||||
|
|
||||||
|
it('should return NoSuchVersion if attempting to get acl for ' +
|
||||||
|
'non-existing version', done => {
|
||||||
|
const params = { Bucket: bucket, Key: key,
|
||||||
|
VersionId: nonExistingId };
|
||||||
|
s3.getObjectAcl(params, err => {
|
||||||
|
assert(err, 'Expected err but did not find one');
|
||||||
|
assert.strictEqual(err.code, 'NoSuchVersion');
|
||||||
|
assert.strictEqual(err.statusCode, 404);
|
||||||
|
done();
|
||||||
|
});
|
||||||
|
});
|
||||||
|
|
||||||
|
it('should return InvalidArgument if attempting to get acl for ' +
|
||||||
|
'invalid id', done => {
|
||||||
|
const params = { Bucket: bucket, Key: key, VersionId: fakeId };
|
||||||
|
s3.getObjectAcl(params, err => {
|
||||||
|
assert(err, 'Expected err but did not find one');
|
||||||
|
assert.strictEqual(err.code, 'InvalidArgument');
|
||||||
|
assert.strictEqual(err.statusCode, 400);
|
||||||
|
done();
|
||||||
|
});
|
||||||
|
});
|
||||||
|
});
|
||||||
|
|
||||||
|
describe('on a version-enabled bucket with non-versioned object', () => {
|
||||||
|
const versionIds = [];
|
||||||
|
|
||||||
|
beforeEach(done => {
|
||||||
|
const params = { Bucket: bucket, Key: key };
|
||||||
|
async.waterfall([
|
||||||
|
callback => s3.putObject(params, err => callback(err)),
|
||||||
|
callback => s3.putBucketVersioning({
|
||||||
|
Bucket: bucket,
|
||||||
|
VersioningConfiguration: constants.versioningEnabled,
|
||||||
|
}, err => callback(err)),
|
||||||
|
], done);
|
||||||
|
});
|
||||||
|
|
||||||
|
afterEach(done => {
|
||||||
|
// cleanup versionIds just in case
|
||||||
|
versionIds.length = 0;
|
||||||
|
done();
|
||||||
|
});
|
||||||
|
|
||||||
|
describe('before putting new versions', () => {
|
||||||
|
it('non-version specific put and get ACL should now ' +
|
||||||
|
'return version ID ("null") in response headers', done => {
|
||||||
|
_putAndGetAcl('public-read', undefined, 'null', 'null', done);
|
||||||
|
});
|
||||||
|
});
|
||||||
|
|
||||||
|
describe('after putting new versions', () => {
|
||||||
|
beforeEach(done => {
|
||||||
|
const params = { Bucket: bucket, Key: key };
|
||||||
|
async.timesSeries(counter, (i, next) =>
|
||||||
|
s3.putObject(params, (err, data) => {
|
||||||
|
_assertNoError(err, `putting version #${i}`);
|
||||||
|
versionIds.push(data.VersionId);
|
||||||
|
next(err);
|
||||||
|
}), done);
|
||||||
|
});
|
||||||
|
|
||||||
|
_testBehaviorVersioningEnabledOrSuspended(versionIds);
|
||||||
|
});
|
||||||
|
});
|
||||||
|
|
||||||
|
describe('on version-suspended bucket with non-versioned object', () => {
|
||||||
|
const versionIds = [];
|
||||||
|
|
||||||
|
beforeEach(done => {
|
||||||
|
const params = { Bucket: bucket, Key: key };
|
||||||
|
async.waterfall([
|
||||||
|
callback => s3.putObject(params, err => callback(err)),
|
||||||
|
callback => s3.putBucketVersioning({
|
||||||
|
Bucket: bucket,
|
||||||
|
VersioningConfiguration: constants.versioningSuspended,
|
||||||
|
}, err => callback(err)),
|
||||||
|
], done);
|
||||||
|
});
|
||||||
|
|
||||||
|
afterEach(done => {
|
||||||
|
// cleanup versionIds just in case
|
||||||
|
versionIds.length = 0;
|
||||||
|
done();
|
||||||
|
});
|
||||||
|
|
||||||
|
describe('before putting new versions', () => {
|
||||||
|
it('non-version specific put and get ACL should still ' +
|
||||||
|
'return version ID ("null") in response headers', done => {
|
||||||
|
_putAndGetAcl('public-read', undefined, 'null', 'null', done);
|
||||||
|
});
|
||||||
|
});
|
||||||
|
|
||||||
|
describe('after putting new versions', () => {
|
||||||
|
beforeEach(done => {
|
||||||
|
const params = { Bucket: bucket, Key: key };
|
||||||
|
async.waterfall([
|
||||||
|
callback => s3.putBucketVersioning({
|
||||||
|
Bucket: bucket,
|
||||||
|
VersioningConfiguration: constants.versioningEnabled,
|
||||||
|
}, err => callback(err)),
|
||||||
|
callback => async.timesSeries(counter, (i, next) =>
|
||||||
|
s3.putObject(params, (err, data) => {
|
||||||
|
_assertNoError(err, `putting version #${i}`);
|
||||||
|
versionIds.push(data.VersionId);
|
||||||
|
next(err);
|
||||||
|
}), err => callback(err)),
|
||||||
|
callback => s3.putBucketVersioning({
|
||||||
|
Bucket: bucket,
|
||||||
|
VersioningConfiguration: constants.versioningSuspended,
|
||||||
|
}, err => callback(err)),
|
||||||
|
], done);
|
||||||
|
});
|
||||||
|
|
||||||
|
_testBehaviorVersioningEnabledOrSuspended(versionIds);
|
||||||
|
});
|
||||||
|
});
|
||||||
|
});
|
File diff suppressed because it is too large
Load Diff
|
@ -0,0 +1,533 @@
|
||||||
|
import assert from 'assert';
|
||||||
|
import { S3 } from 'aws-sdk';
|
||||||
|
import async from 'async';
|
||||||
|
|
||||||
|
import getConfig from '../support/config';
|
||||||
|
|
||||||
|
const bucket = `versioning-bucket-${Date.now()}`;
|
||||||
|
const key = 'anObject';
|
||||||
|
|
||||||
|
const testing = process.env.VERSIONING === 'no' ? describe.skip : describe;
|
||||||
|
|
||||||
|
testing('aws-node-sdk test delete object', function testSuite() {
|
||||||
|
this.timeout(600000);
|
||||||
|
let s3 = undefined;
|
||||||
|
let versionIds = undefined;
|
||||||
|
|
||||||
|
function _deleteVersionList(versionList, bucket, callback) {
|
||||||
|
async.each(versionList, (versionInfo, cb) => {
|
||||||
|
const versionId = versionInfo.VersionId;
|
||||||
|
const params = { Bucket: bucket, Key: versionInfo.Key,
|
||||||
|
VersionId: versionId };
|
||||||
|
s3.deleteObject(params, cb);
|
||||||
|
}, callback);
|
||||||
|
}
|
||||||
|
function _removeAllVersions(bucket, callback) {
|
||||||
|
return s3.listObjectVersions({ Bucket: bucket }, (err, data) => {
|
||||||
|
process.stdout.write(
|
||||||
|
'list object versions before deletion' +
|
||||||
|
`${JSON.stringify(data, undefined, '\t')}`);
|
||||||
|
if (err && err.NoSuchBucket) {
|
||||||
|
return callback();
|
||||||
|
} else if (err) {
|
||||||
|
return callback(err);
|
||||||
|
}
|
||||||
|
return _deleteVersionList(data.DeleteMarkers, bucket, err => {
|
||||||
|
if (err) {
|
||||||
|
return callback(err);
|
||||||
|
}
|
||||||
|
return _deleteVersionList(data.Versions, bucket, callback);
|
||||||
|
});
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
// setup test
|
||||||
|
before(done => {
|
||||||
|
versionIds = [];
|
||||||
|
const config = getConfig('default', { signatureVersion: 'v4' });
|
||||||
|
s3 = new S3(config);
|
||||||
|
s3.createBucket({ Bucket: bucket }, done);
|
||||||
|
});
|
||||||
|
|
||||||
|
// delete bucket after testing
|
||||||
|
after(done => {
|
||||||
|
// TODO: remove conditional after listing is implemented
|
||||||
|
if (process.env.AWS_ON_AIR === 'true') {
|
||||||
|
return _removeAllVersions(bucket, err => {
|
||||||
|
if (err) {
|
||||||
|
return done(err);
|
||||||
|
}
|
||||||
|
return s3.deleteBucket({ Bucket: bucket }, err => {
|
||||||
|
assert.strictEqual(err, null,
|
||||||
|
`Error deleting bucket: ${err}`);
|
||||||
|
return done();
|
||||||
|
});
|
||||||
|
});
|
||||||
|
}
|
||||||
|
return done();
|
||||||
|
});
|
||||||
|
|
||||||
|
it('creating non-versionned object', done => {
|
||||||
|
s3.putObject({
|
||||||
|
Bucket: bucket,
|
||||||
|
Key: key,
|
||||||
|
}, (err, res) => {
|
||||||
|
if (err) {
|
||||||
|
return done(err);
|
||||||
|
}
|
||||||
|
assert.equal(res.VersionId, undefined);
|
||||||
|
return done();
|
||||||
|
});
|
||||||
|
});
|
||||||
|
|
||||||
|
it('enable versioning', done => {
|
||||||
|
const params = {
|
||||||
|
Bucket: bucket,
|
||||||
|
VersioningConfiguration: {
|
||||||
|
Status: 'Enabled',
|
||||||
|
},
|
||||||
|
};
|
||||||
|
s3.putBucketVersioning(params, done);
|
||||||
|
});
|
||||||
|
|
||||||
|
it('should not send back error for non-existing key (specific version)',
|
||||||
|
done => {
|
||||||
|
s3.deleteObject({
|
||||||
|
Bucket: bucket,
|
||||||
|
Key: `${key}3`,
|
||||||
|
VersionId: 'null',
|
||||||
|
}, err => {
|
||||||
|
if (err) {
|
||||||
|
return done(err);
|
||||||
|
}
|
||||||
|
return done();
|
||||||
|
});
|
||||||
|
});
|
||||||
|
|
||||||
|
it('delete non existent object should create a delete marker', done => {
|
||||||
|
s3.deleteObject({
|
||||||
|
Bucket: bucket,
|
||||||
|
Key: `${key}2`,
|
||||||
|
}, (err, res) => {
|
||||||
|
if (err) {
|
||||||
|
return done(err);
|
||||||
|
}
|
||||||
|
assert.strictEqual(res.DeleteMarker, 'true');
|
||||||
|
assert.notEqual(res.VersionId, undefined);
|
||||||
|
return s3.deleteObject({
|
||||||
|
Bucket: bucket,
|
||||||
|
Key: `${key}2`,
|
||||||
|
}, (err, res2) => {
|
||||||
|
if (err) {
|
||||||
|
return done(err);
|
||||||
|
}
|
||||||
|
assert.strictEqual(res2.DeleteMarker, 'true');
|
||||||
|
assert.notEqual(res2.VersionId, res.VersionId);
|
||||||
|
return s3.deleteObject({
|
||||||
|
Bucket: bucket,
|
||||||
|
Key: `${key}2`,
|
||||||
|
VersionId: res.VersionId,
|
||||||
|
}, err => {
|
||||||
|
if (err) {
|
||||||
|
return done(err);
|
||||||
|
}
|
||||||
|
return s3.deleteObject({
|
||||||
|
Bucket: bucket,
|
||||||
|
Key: `${key}2`,
|
||||||
|
VersionId: res2.VersionId,
|
||||||
|
}, err => done(err));
|
||||||
|
});
|
||||||
|
});
|
||||||
|
});
|
||||||
|
});
|
||||||
|
|
||||||
|
it('put a version to the object', done => {
|
||||||
|
s3.putObject({
|
||||||
|
Bucket: bucket,
|
||||||
|
Key: key,
|
||||||
|
Body: 'test',
|
||||||
|
}, (err, res) => {
|
||||||
|
if (err) {
|
||||||
|
return done(err);
|
||||||
|
}
|
||||||
|
versionIds.push('null');
|
||||||
|
versionIds.push(res.VersionId);
|
||||||
|
assert.notEqual(res.VersionId, undefined);
|
||||||
|
return done();
|
||||||
|
});
|
||||||
|
});
|
||||||
|
|
||||||
|
it('should create a delete marker', done => {
|
||||||
|
s3.deleteObject({
|
||||||
|
Bucket: bucket,
|
||||||
|
Key: key,
|
||||||
|
}, (err, res) => {
|
||||||
|
if (err) {
|
||||||
|
return done(err);
|
||||||
|
}
|
||||||
|
assert.strictEqual(res.DeleteMarker, 'true');
|
||||||
|
assert.strictEqual(versionIds.find(item => item === res.VersionId),
|
||||||
|
undefined);
|
||||||
|
versionIds.push(res.VersionId);
|
||||||
|
return done();
|
||||||
|
});
|
||||||
|
});
|
||||||
|
|
||||||
|
it('should return 404 with a delete marker', done => {
|
||||||
|
s3.getObject({
|
||||||
|
Bucket: bucket,
|
||||||
|
Key: key,
|
||||||
|
}, function test(err) {
|
||||||
|
if (!err) {
|
||||||
|
return done(new Error('should return 404'));
|
||||||
|
}
|
||||||
|
const headers = this.httpResponse.headers;
|
||||||
|
assert.strictEqual(headers['x-amz-delete-marker'], 'true');
|
||||||
|
return done();
|
||||||
|
});
|
||||||
|
});
|
||||||
|
|
||||||
|
it('should delete the null version', done => {
|
||||||
|
const version = versionIds.shift();
|
||||||
|
s3.deleteObject({
|
||||||
|
Bucket: bucket,
|
||||||
|
Key: key,
|
||||||
|
VersionId: version,
|
||||||
|
}, (err, res) => {
|
||||||
|
if (err) {
|
||||||
|
return done(err);
|
||||||
|
}
|
||||||
|
assert.strictEqual(res.VersionId, version);
|
||||||
|
assert.equal(res.DeleteMarker, undefined);
|
||||||
|
return done();
|
||||||
|
});
|
||||||
|
});
|
||||||
|
|
||||||
|
it('should delete the versionned object', done => {
|
||||||
|
const version = versionIds.shift();
|
||||||
|
s3.deleteObject({
|
||||||
|
Bucket: bucket,
|
||||||
|
Key: key,
|
||||||
|
VersionId: version,
|
||||||
|
}, (err, res) => {
|
||||||
|
if (err) {
|
||||||
|
return done(err);
|
||||||
|
}
|
||||||
|
assert.strictEqual(res.VersionId, version);
|
||||||
|
assert.equal(res.DeleteMarker, undefined);
|
||||||
|
return done();
|
||||||
|
});
|
||||||
|
});
|
||||||
|
|
||||||
|
it('should delete the delete-marker version', done => {
|
||||||
|
const version = versionIds.shift();
|
||||||
|
s3.deleteObject({
|
||||||
|
Bucket: bucket,
|
||||||
|
Key: key,
|
||||||
|
VersionId: version,
|
||||||
|
}, (err, res) => {
|
||||||
|
if (err) {
|
||||||
|
return done(err);
|
||||||
|
}
|
||||||
|
assert.strictEqual(res.VersionId, version);
|
||||||
|
assert.equal(res.DeleteMarker, 'true');
|
||||||
|
return done();
|
||||||
|
});
|
||||||
|
});
|
||||||
|
|
||||||
|
it('put a new version', done => {
|
||||||
|
s3.putObject({
|
||||||
|
Bucket: bucket,
|
||||||
|
Key: key,
|
||||||
|
Body: 'test',
|
||||||
|
}, (err, res) => {
|
||||||
|
if (err) {
|
||||||
|
return done(err);
|
||||||
|
}
|
||||||
|
versionIds.push(res.VersionId);
|
||||||
|
assert.notEqual(res.VersionId, undefined);
|
||||||
|
return done();
|
||||||
|
});
|
||||||
|
});
|
||||||
|
|
||||||
|
it('get the null version', done => {
|
||||||
|
s3.getObject({
|
||||||
|
Bucket: bucket,
|
||||||
|
Key: key,
|
||||||
|
VersionId: 'null',
|
||||||
|
}, err => {
|
||||||
|
if (!err || err.code !== 'NoSuchVersion') {
|
||||||
|
return done(err || 'should send back an error');
|
||||||
|
}
|
||||||
|
return done();
|
||||||
|
});
|
||||||
|
});
|
||||||
|
|
||||||
|
it('suspending versioning', done => {
|
||||||
|
const params = {
|
||||||
|
Bucket: bucket,
|
||||||
|
VersioningConfiguration: {
|
||||||
|
Status: 'Suspended',
|
||||||
|
},
|
||||||
|
};
|
||||||
|
s3.putBucketVersioning(params, done);
|
||||||
|
});
|
||||||
|
|
||||||
|
it('delete non existent object should create a delete marker', done => {
|
||||||
|
s3.deleteObject({
|
||||||
|
Bucket: bucket,
|
||||||
|
Key: `${key}2`,
|
||||||
|
}, (err, res) => {
|
||||||
|
if (err) {
|
||||||
|
return done(err);
|
||||||
|
}
|
||||||
|
assert.strictEqual(res.DeleteMarker, 'true');
|
||||||
|
assert.notEqual(res.VersionId, undefined);
|
||||||
|
return s3.deleteObject({
|
||||||
|
Bucket: bucket,
|
||||||
|
Key: `${key}2`,
|
||||||
|
}, (err, res2) => {
|
||||||
|
if (err) {
|
||||||
|
return done(err);
|
||||||
|
}
|
||||||
|
assert.strictEqual(res2.DeleteMarker, 'true');
|
||||||
|
assert.strictEqual(res2.VersionId, res.VersionId);
|
||||||
|
return s3.deleteObject({
|
||||||
|
Bucket: bucket,
|
||||||
|
Key: `${key}2`,
|
||||||
|
VersionId: res.VersionId,
|
||||||
|
}, err => done(err));
|
||||||
|
});
|
||||||
|
});
|
||||||
|
});
|
||||||
|
|
||||||
|
it('should put a new delete marker', done => {
|
||||||
|
s3.deleteObject({
|
||||||
|
Bucket: bucket,
|
||||||
|
Key: key,
|
||||||
|
}, (err, res) => {
|
||||||
|
if (err) {
|
||||||
|
return done(err);
|
||||||
|
}
|
||||||
|
assert.strictEqual(res.DeleteMarker, 'true');
|
||||||
|
assert.strictEqual(res.VersionId, 'null');
|
||||||
|
return done();
|
||||||
|
});
|
||||||
|
});
|
||||||
|
|
||||||
|
it('enabling versioning', done => {
|
||||||
|
const params = {
|
||||||
|
Bucket: bucket,
|
||||||
|
VersioningConfiguration: {
|
||||||
|
Status: 'Enabled',
|
||||||
|
},
|
||||||
|
};
|
||||||
|
s3.putBucketVersioning(params, done);
|
||||||
|
});
|
||||||
|
|
||||||
|
it('should get the null version', done => {
|
||||||
|
s3.getObject({
|
||||||
|
Bucket: bucket,
|
||||||
|
Key: key,
|
||||||
|
VersionId: 'null',
|
||||||
|
}, function test(err) {
|
||||||
|
const headers = this.httpResponse.headers;
|
||||||
|
assert.strictEqual(headers['x-amz-delete-marker'], 'true');
|
||||||
|
assert.strictEqual(headers['x-amz-version-id'], 'null');
|
||||||
|
if (err && err.code !== 'MethodNotAllowed') {
|
||||||
|
return done(err);
|
||||||
|
} else if (err) {
|
||||||
|
return done();
|
||||||
|
}
|
||||||
|
return done('should return an error');
|
||||||
|
});
|
||||||
|
});
|
||||||
|
|
||||||
|
it('put a new version to store the null version', done => {
|
||||||
|
s3.putObject({
|
||||||
|
Bucket: bucket,
|
||||||
|
Key: key,
|
||||||
|
Body: 'test',
|
||||||
|
}, (err, res) => {
|
||||||
|
if (err) {
|
||||||
|
return done(err);
|
||||||
|
}
|
||||||
|
versionIds.push(res.VersionId);
|
||||||
|
return done();
|
||||||
|
});
|
||||||
|
});
|
||||||
|
|
||||||
|
it('suspending versioning', done => {
|
||||||
|
const params = {
|
||||||
|
Bucket: bucket,
|
||||||
|
VersioningConfiguration: {
|
||||||
|
Status: 'Suspended',
|
||||||
|
},
|
||||||
|
};
|
||||||
|
s3.putBucketVersioning(params, done);
|
||||||
|
});
|
||||||
|
|
||||||
|
it('put null version', done => {
|
||||||
|
s3.putObject({
|
||||||
|
Bucket: bucket,
|
||||||
|
Key: key,
|
||||||
|
Body: 'test-null-version',
|
||||||
|
}, (err, res) => {
|
||||||
|
if (err) {
|
||||||
|
return done(err);
|
||||||
|
}
|
||||||
|
assert.strictEqual(res.VersionId, undefined);
|
||||||
|
return done();
|
||||||
|
});
|
||||||
|
});
|
||||||
|
|
||||||
|
it('enabling versioning', done => {
|
||||||
|
const params = {
|
||||||
|
Bucket: bucket,
|
||||||
|
VersioningConfiguration: {
|
||||||
|
Status: 'Enabled',
|
||||||
|
},
|
||||||
|
};
|
||||||
|
s3.putBucketVersioning(params, done);
|
||||||
|
});
|
||||||
|
|
||||||
|
it('should get the null version', done => {
|
||||||
|
s3.getObject({
|
||||||
|
Bucket: bucket,
|
||||||
|
Key: key,
|
||||||
|
}, (err, res) => {
|
||||||
|
if (err) {
|
||||||
|
return done(err);
|
||||||
|
}
|
||||||
|
assert.strictEqual(res.Body.toString(), 'test-null-version');
|
||||||
|
return done();
|
||||||
|
});
|
||||||
|
});
|
||||||
|
|
||||||
|
it('should add a delete marker', done => {
|
||||||
|
s3.deleteObject({
|
||||||
|
Bucket: bucket,
|
||||||
|
Key: key,
|
||||||
|
}, (err, res) => {
|
||||||
|
if (err) {
|
||||||
|
return done(err);
|
||||||
|
}
|
||||||
|
assert.strictEqual(res.DeleteMarker, 'true');
|
||||||
|
versionIds.push(res.VersionId);
|
||||||
|
return done();
|
||||||
|
});
|
||||||
|
});
|
||||||
|
|
||||||
|
it('should get the null version', done => {
|
||||||
|
s3.getObject({
|
||||||
|
Bucket: bucket,
|
||||||
|
Key: key,
|
||||||
|
VersionId: 'null',
|
||||||
|
}, (err, res) => {
|
||||||
|
if (err) {
|
||||||
|
return done(err);
|
||||||
|
}
|
||||||
|
assert.strictEqual(res.Body.toString(), 'test-null-version');
|
||||||
|
return done();
|
||||||
|
});
|
||||||
|
});
|
||||||
|
|
||||||
|
it('should add a delete marker', done => {
|
||||||
|
s3.deleteObject({
|
||||||
|
Bucket: bucket,
|
||||||
|
Key: key,
|
||||||
|
}, (err, res) => {
|
||||||
|
if (err) {
|
||||||
|
return done(err);
|
||||||
|
}
|
||||||
|
assert.strictEqual(res.DeleteMarker, 'true');
|
||||||
|
assert.strictEqual(versionIds.find(item => item === res.VersionId),
|
||||||
|
undefined);
|
||||||
|
versionIds.push(res.VersionId);
|
||||||
|
return done();
|
||||||
|
});
|
||||||
|
});
|
||||||
|
|
||||||
|
it('should set the null version as master', done => {
|
||||||
|
let version = versionIds.pop();
|
||||||
|
s3.deleteObject({
|
||||||
|
Bucket: bucket,
|
||||||
|
Key: key,
|
||||||
|
VersionId: version,
|
||||||
|
}, (err, res) => {
|
||||||
|
if (err) {
|
||||||
|
return done(err);
|
||||||
|
}
|
||||||
|
assert.strictEqual(res.VersionId, version);
|
||||||
|
assert.strictEqual(res.DeleteMarker, 'true');
|
||||||
|
version = versionIds.pop();
|
||||||
|
return s3.deleteObject({
|
||||||
|
Bucket: bucket,
|
||||||
|
Key: key,
|
||||||
|
VersionId: version,
|
||||||
|
}, (err, res) => {
|
||||||
|
if (err) {
|
||||||
|
return done(err);
|
||||||
|
}
|
||||||
|
assert.strictEqual(res.VersionId, version);
|
||||||
|
assert.strictEqual(res.DeleteMarker, 'true');
|
||||||
|
return s3.getObject({
|
||||||
|
Bucket: bucket,
|
||||||
|
Key: key,
|
||||||
|
}, (err, res) => {
|
||||||
|
if (err) {
|
||||||
|
return done(err);
|
||||||
|
}
|
||||||
|
assert.strictEqual(res.Body.toString(),
|
||||||
|
'test-null-version');
|
||||||
|
return done();
|
||||||
|
});
|
||||||
|
});
|
||||||
|
});
|
||||||
|
});
|
||||||
|
|
||||||
|
it('should delete null version', done => {
|
||||||
|
s3.deleteObject({
|
||||||
|
Bucket: bucket,
|
||||||
|
Key: key,
|
||||||
|
VersionId: 'null',
|
||||||
|
}, (err, res) => {
|
||||||
|
if (err) {
|
||||||
|
return done(err);
|
||||||
|
}
|
||||||
|
assert.strictEqual(res.VersionId, 'null');
|
||||||
|
return s3.getObject({
|
||||||
|
Bucket: bucket,
|
||||||
|
Key: key,
|
||||||
|
}, (err, res) => {
|
||||||
|
if (err) {
|
||||||
|
return done(err);
|
||||||
|
}
|
||||||
|
assert.strictEqual(res.VersionId,
|
||||||
|
versionIds[versionIds.length - 1]);
|
||||||
|
return done();
|
||||||
|
});
|
||||||
|
});
|
||||||
|
});
|
||||||
|
|
||||||
|
it('should be able to delete the bucket', done => {
|
||||||
|
async.eachSeries(versionIds, (id, next) => {
|
||||||
|
s3.deleteObject({
|
||||||
|
Bucket: bucket,
|
||||||
|
Key: key,
|
||||||
|
VersionId: id,
|
||||||
|
}, (err, res) => {
|
||||||
|
if (err) {
|
||||||
|
return next(err);
|
||||||
|
}
|
||||||
|
assert.strictEqual(res.VersionId, id);
|
||||||
|
return next();
|
||||||
|
});
|
||||||
|
}, err => {
|
||||||
|
if (err) {
|
||||||
|
return done(err);
|
||||||
|
}
|
||||||
|
return s3.deleteBucket({ Bucket: bucket }, err => done(err));
|
||||||
|
});
|
||||||
|
});
|
||||||
|
});
|
|
@ -0,0 +1,361 @@
|
||||||
|
import assert from 'assert';
|
||||||
|
import { S3 } from 'aws-sdk';
|
||||||
|
import async from 'async';
|
||||||
|
|
||||||
|
import getConfig from '../support/config';
|
||||||
|
|
||||||
|
const bucket = `versioning-bucket-${Date.now()}`;
|
||||||
|
|
||||||
|
const testing = process.env.VERSIONING === 'no' ?
|
||||||
|
describe.skip : describe;
|
||||||
|
|
||||||
|
testing('listObject - Delimiter master', function testSuite() {
|
||||||
|
this.timeout(600000);
|
||||||
|
let s3 = undefined;
|
||||||
|
|
||||||
|
function _deleteVersionList(versionList, bucket, callback) {
|
||||||
|
async.each(versionList, (versionInfo, cb) => {
|
||||||
|
const versionId = versionInfo.VersionId;
|
||||||
|
const params = { Bucket: bucket, Key: versionInfo.Key,
|
||||||
|
VersionId: versionId };
|
||||||
|
s3.deleteObject(params, cb);
|
||||||
|
}, callback);
|
||||||
|
}
|
||||||
|
function _removeAllVersions(bucket, callback) {
|
||||||
|
return s3.listObjectVersions({ Bucket: bucket }, (err, data) => {
|
||||||
|
if (err && err.NoSuchBucket) {
|
||||||
|
return callback();
|
||||||
|
} else if (err) {
|
||||||
|
return callback(err);
|
||||||
|
}
|
||||||
|
return _deleteVersionList(data.DeleteMarkers, bucket, err => {
|
||||||
|
if (err) {
|
||||||
|
return callback(err);
|
||||||
|
}
|
||||||
|
return _deleteVersionList(data.Versions, bucket, callback);
|
||||||
|
});
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
// setup test
|
||||||
|
before(done => {
|
||||||
|
const config = getConfig('default', { signatureVersion: 'v4' });
|
||||||
|
s3 = new S3(config);
|
||||||
|
s3.createBucket({ Bucket: bucket }, done);
|
||||||
|
});
|
||||||
|
|
||||||
|
// delete bucket after testing
|
||||||
|
after(done => {
|
||||||
|
_removeAllVersions(bucket, err => {
|
||||||
|
if (err) {
|
||||||
|
return done(err);
|
||||||
|
}
|
||||||
|
return s3.deleteBucket({ Bucket: bucket }, err => {
|
||||||
|
assert.strictEqual(err, null,
|
||||||
|
`Error deleting bucket: ${err}`);
|
||||||
|
return done();
|
||||||
|
});
|
||||||
|
});
|
||||||
|
});
|
||||||
|
|
||||||
|
let versioning = false;
|
||||||
|
|
||||||
|
const objects = [
|
||||||
|
{ name: 'notes/summer/august/1.txt', value: 'foo', isNull: true },
|
||||||
|
{ name: 'notes/year.txt', value: 'foo', isNull: true },
|
||||||
|
{ name: 'notes/yore.rs', value: 'foo', isNull: true },
|
||||||
|
{ name: 'notes/zaphod/Beeblebrox.txt', value: 'foo', isNull: true },
|
||||||
|
{ name: 'Pâtisserie=中文-español-English', value: 'foo' },
|
||||||
|
{ name: 'Pâtisserie=中文-español-English', value: 'bar' },
|
||||||
|
{ name: 'notes/spring/1.txt', value: 'qux' },
|
||||||
|
{ name: 'notes/spring/1.txt', value: 'foo' },
|
||||||
|
{ name: 'notes/spring/1.txt', value: 'bar' },
|
||||||
|
{ name: 'notes/spring/2.txt', value: 'foo' },
|
||||||
|
{ name: 'notes/spring/2.txt', value: null },
|
||||||
|
{ name: 'notes/spring/march/1.txt', value: 'foo' },
|
||||||
|
{ name: 'notes/spring/march/1.txt', value: 'bar', isNull: true },
|
||||||
|
{ name: 'notes/summer/1.txt', value: 'foo' },
|
||||||
|
{ name: 'notes/summer/1.txt', value: 'bar' },
|
||||||
|
{ name: 'notes/summer/2.txt', value: 'bar' },
|
||||||
|
{ name: 'notes/summer/4.txt', value: null },
|
||||||
|
{ name: 'notes/summer/4.txt', value: null },
|
||||||
|
{ name: 'notes/summer/4.txt', value: null },
|
||||||
|
{ name: 'notes/summer/444.txt', value: null },
|
||||||
|
{ name: 'notes/summer/44444.txt', value: null },
|
||||||
|
];
|
||||||
|
|
||||||
|
it('put objects inside bucket', done => {
|
||||||
|
async.eachSeries(objects, (obj, next) => {
|
||||||
|
async.waterfall([
|
||||||
|
next => {
|
||||||
|
if (!versioning && obj.isNull !== true) {
|
||||||
|
const params = {
|
||||||
|
Bucket: bucket,
|
||||||
|
VersioningConfiguration: {
|
||||||
|
Status: 'Enabled',
|
||||||
|
},
|
||||||
|
};
|
||||||
|
versioning = true;
|
||||||
|
return s3.putBucketVersioning(params, err => next(err));
|
||||||
|
} else if (versioning && obj.isNull === true) {
|
||||||
|
const params = {
|
||||||
|
Bucket: bucket,
|
||||||
|
VersioningConfiguration: {
|
||||||
|
Status: 'Suspended',
|
||||||
|
},
|
||||||
|
};
|
||||||
|
versioning = false;
|
||||||
|
return s3.putBucketVersioning(params, err => next(err));
|
||||||
|
}
|
||||||
|
return next();
|
||||||
|
},
|
||||||
|
next => {
|
||||||
|
if (obj.value === null) {
|
||||||
|
return s3.deleteObject({
|
||||||
|
Bucket: bucket,
|
||||||
|
Key: obj.name,
|
||||||
|
}, function test(err) {
|
||||||
|
const headers = this.httpResponse.headers;
|
||||||
|
assert.strictEqual(headers['x-amz-delete-marker'],
|
||||||
|
'true');
|
||||||
|
return next(err);
|
||||||
|
});
|
||||||
|
}
|
||||||
|
return s3.putObject({
|
||||||
|
Bucket: bucket,
|
||||||
|
Key: obj.name,
|
||||||
|
Body: obj.value,
|
||||||
|
}, err => next(err));
|
||||||
|
},
|
||||||
|
], err => next(err));
|
||||||
|
}, err => done(err));
|
||||||
|
});
|
||||||
|
|
||||||
|
[
|
||||||
|
{
|
||||||
|
name: 'basic listing',
|
||||||
|
params: {},
|
||||||
|
expectedResult: [
|
||||||
|
'Pâtisserie=中文-español-English',
|
||||||
|
'notes/spring/1.txt',
|
||||||
|
'notes/spring/march/1.txt',
|
||||||
|
'notes/summer/1.txt',
|
||||||
|
'notes/summer/2.txt',
|
||||||
|
'notes/summer/august/1.txt',
|
||||||
|
'notes/year.txt',
|
||||||
|
'notes/yore.rs',
|
||||||
|
'notes/zaphod/Beeblebrox.txt',
|
||||||
|
],
|
||||||
|
commonPrefix: [],
|
||||||
|
isTruncated: false,
|
||||||
|
nextMarker: undefined,
|
||||||
|
},
|
||||||
|
{
|
||||||
|
name: 'with valid marker',
|
||||||
|
params: { Marker: 'notes/summer/1.txt' },
|
||||||
|
expectedResult: [
|
||||||
|
'notes/summer/2.txt',
|
||||||
|
'notes/summer/august/1.txt',
|
||||||
|
'notes/year.txt',
|
||||||
|
'notes/yore.rs',
|
||||||
|
'notes/zaphod/Beeblebrox.txt',
|
||||||
|
],
|
||||||
|
commonPrefix: [],
|
||||||
|
isTruncated: false,
|
||||||
|
nextMarker: undefined,
|
||||||
|
},
|
||||||
|
{
|
||||||
|
name: 'with bad marker',
|
||||||
|
params: { Marker: 'zzzz', Delimiter: '/' },
|
||||||
|
expectedResult: [],
|
||||||
|
commonPrefix: [],
|
||||||
|
isTruncated: false,
|
||||||
|
nextMarker: undefined,
|
||||||
|
},
|
||||||
|
{
|
||||||
|
name: 'with maxKeys',
|
||||||
|
params: { MaxKeys: 3 },
|
||||||
|
expectedResult: [
|
||||||
|
'Pâtisserie=中文-español-English',
|
||||||
|
'notes/spring/1.txt',
|
||||||
|
'notes/spring/march/1.txt',
|
||||||
|
],
|
||||||
|
commonPrefix: [],
|
||||||
|
isTruncated: true,
|
||||||
|
nextMarker: undefined,
|
||||||
|
},
|
||||||
|
{
|
||||||
|
name: 'with big maxKeys',
|
||||||
|
params: { MaxKeys: 15000 },
|
||||||
|
expectedResult: [
|
||||||
|
'Pâtisserie=中文-español-English',
|
||||||
|
'notes/spring/1.txt',
|
||||||
|
'notes/spring/march/1.txt',
|
||||||
|
'notes/summer/1.txt',
|
||||||
|
'notes/summer/2.txt',
|
||||||
|
'notes/summer/august/1.txt',
|
||||||
|
'notes/year.txt',
|
||||||
|
'notes/yore.rs',
|
||||||
|
'notes/zaphod/Beeblebrox.txt',
|
||||||
|
],
|
||||||
|
commonPrefix: [],
|
||||||
|
isTruncated: false,
|
||||||
|
nextMarker: undefined,
|
||||||
|
},
|
||||||
|
{
|
||||||
|
name: 'with delimiter',
|
||||||
|
params: { Delimiter: '/' },
|
||||||
|
expectedResult: [
|
||||||
|
'Pâtisserie=中文-español-English',
|
||||||
|
],
|
||||||
|
commonPrefix: ['notes/'],
|
||||||
|
isTruncated: false,
|
||||||
|
nextMarker: undefined,
|
||||||
|
},
|
||||||
|
{
|
||||||
|
name: 'with long delimiter',
|
||||||
|
params: { Delimiter: 'notes/summer' },
|
||||||
|
expectedResult: [
|
||||||
|
'Pâtisserie=中文-español-English',
|
||||||
|
'notes/spring/1.txt',
|
||||||
|
'notes/spring/march/1.txt',
|
||||||
|
'notes/year.txt',
|
||||||
|
'notes/yore.rs',
|
||||||
|
'notes/zaphod/Beeblebrox.txt',
|
||||||
|
],
|
||||||
|
commonPrefix: ['notes/summer'],
|
||||||
|
isTruncated: false,
|
||||||
|
nextMarker: undefined,
|
||||||
|
},
|
||||||
|
{
|
||||||
|
name: 'bad marker and good prefix',
|
||||||
|
params: {
|
||||||
|
Delimiter: '/',
|
||||||
|
Prefix: 'notes/summer/',
|
||||||
|
Marker: 'notes/summer0',
|
||||||
|
},
|
||||||
|
expectedResult: [],
|
||||||
|
commonPrefix: [],
|
||||||
|
isTruncated: false,
|
||||||
|
nextMarker: undefined,
|
||||||
|
},
|
||||||
|
{
|
||||||
|
name: 'delimiter and prefix (related to #147)',
|
||||||
|
params: { Delimiter: '/', Prefix: 'notes/' },
|
||||||
|
expectedResult: [
|
||||||
|
'notes/year.txt',
|
||||||
|
'notes/yore.rs',
|
||||||
|
],
|
||||||
|
commonPrefix: [
|
||||||
|
'notes/spring/',
|
||||||
|
'notes/summer/',
|
||||||
|
'notes/zaphod/',
|
||||||
|
],
|
||||||
|
isTruncated: false,
|
||||||
|
nextMarker: undefined,
|
||||||
|
},
|
||||||
|
{
|
||||||
|
name: 'delimiter, prefix and marker (related to #147)',
|
||||||
|
params: {
|
||||||
|
Delimiter: '/',
|
||||||
|
Prefix: 'notes/',
|
||||||
|
Marker: 'notes/year.txt',
|
||||||
|
},
|
||||||
|
expectedResult: ['notes/yore.rs'],
|
||||||
|
commonPrefix: ['notes/zaphod/'],
|
||||||
|
isTruncated: false,
|
||||||
|
nextMarker: undefined,
|
||||||
|
},
|
||||||
|
{
|
||||||
|
name: 'all parameters 1/5',
|
||||||
|
params: {
|
||||||
|
Delimiter: '/',
|
||||||
|
Prefix: 'notes/',
|
||||||
|
Marker: 'notes/',
|
||||||
|
MaxKeys: 1,
|
||||||
|
},
|
||||||
|
expectedResult: [],
|
||||||
|
commonPrefix: ['notes/spring/'],
|
||||||
|
isTruncated: true,
|
||||||
|
nextMarker: 'notes/spring/',
|
||||||
|
},
|
||||||
|
{
|
||||||
|
name: 'all parameters 2/5',
|
||||||
|
params: {
|
||||||
|
Delimiter: '/',
|
||||||
|
Prefix: 'notes/',
|
||||||
|
Marker: 'notes/spring/',
|
||||||
|
MaxKeys: 1,
|
||||||
|
},
|
||||||
|
expectedResult: [],
|
||||||
|
commonPrefix: ['notes/summer/'],
|
||||||
|
isTruncated: true,
|
||||||
|
nextMarker: 'notes/summer/',
|
||||||
|
},
|
||||||
|
{
|
||||||
|
name: 'all parameters 3/5',
|
||||||
|
params: {
|
||||||
|
Delimiter: '/',
|
||||||
|
Prefix: 'notes/',
|
||||||
|
Marker: 'notes/summer/',
|
||||||
|
MaxKeys: 1,
|
||||||
|
},
|
||||||
|
expectedResult: ['notes/year.txt'],
|
||||||
|
commonPrefix: [],
|
||||||
|
isTruncated: true,
|
||||||
|
nextMarker: 'notes/year.txt',
|
||||||
|
},
|
||||||
|
{
|
||||||
|
name: 'all parameters 4/5',
|
||||||
|
params: {
|
||||||
|
Delimiter: '/',
|
||||||
|
Prefix: 'notes/',
|
||||||
|
Marker: 'notes/year.txt',
|
||||||
|
MaxKeys: 1,
|
||||||
|
},
|
||||||
|
expectedResult: ['notes/yore.rs'],
|
||||||
|
commonPrefix: [],
|
||||||
|
isTruncated: true,
|
||||||
|
nextMarker: 'notes/yore.rs',
|
||||||
|
},
|
||||||
|
{
|
||||||
|
name: 'all parameters 5/5',
|
||||||
|
params: {
|
||||||
|
Delimiter: '/',
|
||||||
|
Prefix: 'notes/',
|
||||||
|
Marker: 'notes/yore.rs',
|
||||||
|
MaxKeys: 1,
|
||||||
|
},
|
||||||
|
expectedResult: [],
|
||||||
|
commonPrefix: ['notes/zaphod/'],
|
||||||
|
isTruncated: false,
|
||||||
|
nextMarker: undefined,
|
||||||
|
},
|
||||||
|
].forEach(test => {
|
||||||
|
it(test.name, done => {
|
||||||
|
const expectedResult = test.expectedResult;
|
||||||
|
s3.listObjects(Object.assign({ Bucket: bucket }, test.params),
|
||||||
|
(err, res) => {
|
||||||
|
if (err) {
|
||||||
|
return done(err);
|
||||||
|
}
|
||||||
|
res.Contents.forEach(result => {
|
||||||
|
if (!expectedResult.find(key => key === result.Key)) {
|
||||||
|
throw new Error(
|
||||||
|
`listing fail, unexpected key ${result.Key}`);
|
||||||
|
}
|
||||||
|
});
|
||||||
|
res.CommonPrefixes.forEach(cp => {
|
||||||
|
if (!test.commonPrefix.find(
|
||||||
|
item => item === cp.Prefix)) {
|
||||||
|
throw new Error(
|
||||||
|
`listing fail, unexpected prefix ${cp.Prefix}`);
|
||||||
|
}
|
||||||
|
});
|
||||||
|
assert.strictEqual(res.IsTruncated, test.isTruncated);
|
||||||
|
assert.strictEqual(res.NextMarker, test.nextMarker);
|
||||||
|
return done();
|
||||||
|
});
|
||||||
|
});
|
||||||
|
});
|
||||||
|
});
|
|
@ -0,0 +1,376 @@
|
||||||
|
import assert from 'assert';
|
||||||
|
import { S3 } from 'aws-sdk';
|
||||||
|
import async from 'async';
|
||||||
|
import {
|
||||||
|
removeAllVersions,
|
||||||
|
constants,
|
||||||
|
} from '../../lib/utility/versioning-util.js';
|
||||||
|
|
||||||
|
import getConfig from '../support/config';
|
||||||
|
|
||||||
|
const config = getConfig('default', { signatureVersion: 'v4' });
|
||||||
|
const s3 = new S3(config);
|
||||||
|
const data = ['foo1', 'foo2'];
|
||||||
|
const counter = 100;
|
||||||
|
let bucket;
|
||||||
|
const key = '/';
|
||||||
|
|
||||||
|
function _assertNoError(err, desc) {
|
||||||
|
assert.strictEqual(err, null, `Unexpected err ${desc}: ${err}`);
|
||||||
|
}
|
||||||
|
|
||||||
|
const testing = process.env.VERSIONING === 'no' ? describe.skip : describe;
|
||||||
|
|
||||||
|
testing('put and get object with versioning', function testSuite() {
|
||||||
|
this.timeout(600000);
|
||||||
|
|
||||||
|
beforeEach(done => {
|
||||||
|
bucket = `versioning-bucket-${Date.now()}`;
|
||||||
|
s3.createBucket({ Bucket: bucket }, done);
|
||||||
|
});
|
||||||
|
|
||||||
|
afterEach(done => {
|
||||||
|
removeAllVersions({ Bucket: bucket }, err => {
|
||||||
|
if (err) {
|
||||||
|
return done(err);
|
||||||
|
}
|
||||||
|
return s3.deleteBucket({ Bucket: bucket }, done);
|
||||||
|
});
|
||||||
|
});
|
||||||
|
|
||||||
|
it('should put and get a non-versioned object without including ' +
|
||||||
|
'version ids in response headers', done => {
|
||||||
|
const params = { Bucket: bucket, Key: key };
|
||||||
|
s3.putObject(params, (err, data) => {
|
||||||
|
_assertNoError(err, 'putting object');
|
||||||
|
assert.strictEqual(data.VersionId, undefined);
|
||||||
|
s3.getObject(params, (err, data) => {
|
||||||
|
_assertNoError(err, 'getting object');
|
||||||
|
assert.strictEqual(data.VersionId, undefined);
|
||||||
|
done();
|
||||||
|
});
|
||||||
|
});
|
||||||
|
});
|
||||||
|
|
||||||
|
it('version-specific get should still not return version id in ' +
|
||||||
|
'response header', done => {
|
||||||
|
const params = { Bucket: bucket, Key: key };
|
||||||
|
s3.putObject(params, (err, data) => {
|
||||||
|
_assertNoError(err, 'putting object');
|
||||||
|
assert.strictEqual(data.VersionId, undefined);
|
||||||
|
params.VersionId = 'null';
|
||||||
|
s3.getObject(params, (err, data) => {
|
||||||
|
_assertNoError(err, 'getting specific object version "null"');
|
||||||
|
assert.strictEqual(data.VersionId, undefined);
|
||||||
|
done();
|
||||||
|
});
|
||||||
|
});
|
||||||
|
});
|
||||||
|
|
||||||
|
describe('on a version-enabled bucket', () => {
|
||||||
|
beforeEach(done => {
|
||||||
|
s3.putBucketVersioning({
|
||||||
|
Bucket: bucket,
|
||||||
|
VersioningConfiguration: constants.versioningEnabled,
|
||||||
|
}, done);
|
||||||
|
});
|
||||||
|
|
||||||
|
it('should create a new version for an object', done => {
|
||||||
|
const params = { Bucket: bucket, Key: key };
|
||||||
|
s3.putObject(params, (err, data) => {
|
||||||
|
_assertNoError(err, 'putting object');
|
||||||
|
params.VersionId = data.VersionId;
|
||||||
|
s3.getObject(params, (err, data) => {
|
||||||
|
_assertNoError(err, 'getting object');
|
||||||
|
assert.strictEqual(params.VersionId, data.VersionId,
|
||||||
|
'version ids are not equal');
|
||||||
|
done();
|
||||||
|
});
|
||||||
|
});
|
||||||
|
});
|
||||||
|
});
|
||||||
|
|
||||||
|
describe('on a version-enabled bucket with non-versioned object', () => {
    // ETags of the objects written so far; [0] is the pre-versioning put.
    const eTags = [];

    // Write one object while the bucket is still non-versioned, then
    // enable versioning, so a 'null' version exists for the key.
    beforeEach(done => {
        s3.putObject({ Bucket: bucket, Key: key, Body: data[0] },
            (err, data) => {
                if (err) {
                    // BUGFIX: return here; the original fell through,
                    // dereferenced `data` and called done a second time.
                    return done(err);
                }
                eTags.push(data.ETag);
                return s3.putBucketVersioning({
                    Bucket: bucket,
                    VersioningConfiguration: constants.versioningEnabled,
                }, done);
            });
    });

    afterEach(done => {
        // reset eTags
        eTags.length = 0;
        done();
    });

    it('should get null version in versioning enabled bucket',
    done => {
        // NOTE(review): Key '/' differs from the key written in
        // beforeEach — confirm the '/' object is expected to pre-exist.
        const paramsNull = { Bucket: bucket, Key: '/', VersionId: 'null' };
        s3.getObject(paramsNull, err => {
            _assertNoError(err, 'getting null version');
            done();
        });
    });

    it('should keep null version and create a new version for an object',
    done => {
        const params = { Bucket: bucket, Key: key, Body: data[1] };
        s3.putObject(params, (err, data) => {
            // BUGFIX: check err before touching data (original read
            // data.VersionId first and would TypeError on failure).
            _assertNoError(err, 'putting object');
            const newVersion = data.VersionId;
            eTags.push(data.ETag);
            s3.getObject({ Bucket: bucket, Key: key,
                VersionId: newVersion }, (err, data) => {
                assert.strictEqual(err, null);
                assert.strictEqual(data.VersionId, newVersion,
                    'version ids are not equal');
                assert.strictEqual(data.ETag, eTags[1]);
                // The pre-versioning object must survive as the 'null'
                // version with its original ETag.
                s3.getObject({ Bucket: bucket, Key: key,
                    VersionId: 'null' }, (err, data) => {
                    _assertNoError(err, 'getting null version');
                    assert.strictEqual(data.VersionId, 'null');
                    assert.strictEqual(data.ETag, eTags[0]);
                    done();
                });
            });
        });
    });

    it('should create new versions but still keep nullVersionId',
    done => {
        const versionIds = [];
        const params = { Bucket: bucket, Key: key };
        const paramsNull = { Bucket: bucket, Key: key, VersionId: 'null' };
        // create new versions
        async.timesSeries(counter, (i, next) => s3.putObject(params,
            (err, data) => {
                // BUGFIX: the original ignored err and read data directly.
                _assertNoError(err, `putting object #${i}`);
                versionIds.push(data.VersionId);
                // get the 'null' version
                s3.getObject(paramsNull, (err, nullVerData) => {
                    assert.strictEqual(err, null);
                    assert.strictEqual(nullVerData.ETag, eTags[0]);
                    assert.strictEqual(nullVerData.VersionId, 'null');
                    next(err);
                });
            }), done);
    });
});
|
||||||
|
|
||||||
|
describe('on version-suspended bucket', () => {
    // Suspend versioning before each test in this group.
    beforeEach(done => {
        s3.putBucketVersioning({
            Bucket: bucket,
            VersioningConfiguration: constants.versioningSuspended,
        }, done);
    });

    it('should not return version id for new object', done => {
        const params = { Bucket: bucket, Key: key, Body: 'foo' };
        const paramsNull = { Bucket: bucket, Key: key, VersionId: 'null' };
        s3.putObject(params, (err, data) => {
            // BUGFIX: assert the error before reading data.ETag; the
            // original read the ETag first, so a failed put surfaced as a
            // TypeError instead of a meaningful assertion.
            _assertNoError(err, 'putting object');
            const eTag = data.ETag;
            assert.strictEqual(data.VersionId, undefined);
            // getting null version should return object we just put
            s3.getObject(paramsNull, (err, nullVerData) => {
                _assertNoError(err, 'getting null version');
                assert.strictEqual(nullVerData.ETag, eTag);
                assert.strictEqual(nullVerData.VersionId, 'null');
                done();
            });
        });
    });

    it('should update null version if put object twice', done => {
        const params = { Bucket: bucket, Key: key };
        const params1 = { Bucket: bucket, Key: key, Body: data[0] };
        const params2 = { Bucket: bucket, Key: key, Body: data[1] };
        const paramsNull = { Bucket: bucket, Key: key, VersionId: 'null' };
        const eTags = [];
        async.waterfall([
            callback => s3.putObject(params1, (err, data) => {
                _assertNoError(err, 'putting first object');
                assert.strictEqual(data.VersionId, undefined);
                eTags.push(data.ETag);
                callback();
            }),
            callback => s3.getObject(params, (err, data) => {
                _assertNoError(err, 'getting master version');
                assert.strictEqual(data.VersionId, 'null');
                assert.strictEqual(data.ETag, eTags[0],
                    'wrong object data');
                callback();
            }),
            callback => s3.putObject(params2, (err, data) => {
                _assertNoError(err, 'putting second object');
                assert.strictEqual(data.VersionId, undefined);
                eTags.push(data.ETag);
                callback();
            }),
            // While suspended, a second put must overwrite the 'null'
            // version in place.
            callback => s3.getObject(paramsNull, (err, data) => {
                _assertNoError(err, 'getting null version');
                assert.strictEqual(data.VersionId, 'null');
                assert.strictEqual(data.ETag, eTags[1],
                    'wrong object data');
                callback();
            }),
        ], done);
    });
});
|
||||||
|
|
||||||
|
describe('on a version-suspended bucket with non-versioned object', () => {
    // ETags of the objects written so far; [0] is the pre-suspension put.
    const eTags = [];

    // Write one object while the bucket is non-versioned, then suspend
    // versioning, so the key has a 'null' version.
    beforeEach(done => {
        s3.putObject({ Bucket: bucket, Key: key, Body: data[0] },
            (err, data) => {
                if (err) {
                    // BUGFIX: return here; the original fell through,
                    // dereferenced `data` and called done a second time.
                    return done(err);
                }
                eTags.push(data.ETag);
                return s3.putBucketVersioning({
                    Bucket: bucket,
                    VersioningConfiguration: constants.versioningSuspended,
                }, done);
            });
    });

    afterEach(done => {
        // reset eTags
        eTags.length = 0;
        done();
    });

    it('should get null version in versioning suspended bucket',
    done => {
        // NOTE(review): Key '/' differs from the key written in
        // beforeEach — confirm the '/' object is expected to pre-exist.
        const paramsNull = { Bucket: bucket, Key: '/', VersionId: 'null' };
        s3.getObject(paramsNull, err => {
            _assertNoError(err, 'getting null version');
            done();
        });
    });

    it('should update null version in versioning suspended bucket',
    done => {
        const params = { Bucket: bucket, Key: key };
        // NOTE(review): this put targets Key '/' while every assertion
        // below reads back `key`; it looks like the Key here should be
        // `key` — confirm before relying on this test.
        const putParams = { Bucket: bucket, Key: '/', Body: data[1] };
        const paramsNull = { Bucket: bucket, Key: key, VersionId: 'null' };
        async.waterfall([
            callback => s3.getObject(paramsNull, (err, data) => {
                _assertNoError(err, 'getting null version');
                assert.strictEqual(data.VersionId, 'null');
                callback();
            }),
            callback => s3.putObject(putParams, (err, data) => {
                _assertNoError(err, 'putting object');
                assert.strictEqual(data.VersionId, undefined);
                eTags.push(data.ETag);
                callback();
            }),
            callback => s3.getObject(paramsNull, (err, data) => {
                _assertNoError(err, 'getting null version');
                assert.strictEqual(data.VersionId, 'null');
                assert.strictEqual(data.ETag, eTags[1],
                    'wrong object data');
                callback();
            }),
            callback => s3.getObject(params, (err, data) => {
                _assertNoError(err, 'getting master version');
                assert.strictEqual(data.VersionId, 'null');
                assert.strictEqual(data.ETag, eTags[1],
                    'wrong object data');
                callback();
            }),
        ], done);
    });
});
|
||||||
|
|
||||||
|
describe('on versioning suspended then enabled bucket with null version',
() => {
    const eTags = [];

    // Suspend versioning, write the object (becomes the 'null' version),
    // then re-enable versioning.
    beforeEach(done => {
        const params = { Bucket: bucket, Key: key, Body: data[0] };
        async.waterfall([
            callback => s3.putBucketVersioning({
                Bucket: bucket,
                VersioningConfiguration: constants.versioningSuspended,
            }, err => callback(err)),
            callback => s3.putObject(params, (err, data) => {
                if (err) {
                    // BUGFIX: return here; the original called
                    // callback(err) and then callback() again, which
                    // async.waterfall treats as a double callback.
                    return callback(err);
                }
                eTags.push(data.ETag);
                return callback();
            }),
            callback => s3.putBucketVersioning({
                Bucket: bucket,
                VersioningConfiguration: constants.versioningEnabled,
            }, callback),
        ], done);
    });

    afterEach(done => {
        // reset eTags
        eTags.length = 0;
        done();
    });

    it('should preserve the null version when creating new versions',
    done => {
        const params = { Bucket: bucket, Key: key };
        const paramsNull = { Bucket: bucket, Key: key, VersionId: 'null' };
        async.waterfall([
            callback => s3.getObject(paramsNull, (err, nullVerData) => {
                _assertNoError(err, 'getting null version');
                assert.strictEqual(nullVerData.ETag, eTags[0]);
                assert.strictEqual(nullVerData.VersionId, 'null');
                callback();
            }),
            callback => async.timesSeries(counter, (i, next) =>
                s3.putObject(params, (err, data) => {
                    _assertNoError(err, `putting object #${i}`);
                    assert.notEqual(data.VersionId, undefined);
                    next();
                }), err => callback(err)),
            // The 'null' version must still resolve to the original body.
            callback => s3.getObject(paramsNull, (err, nullVerData) => {
                _assertNoError(err, 'getting null version');
                assert.strictEqual(nullVerData.ETag, eTags[0]);
                callback();
            }),
        ], done);
    });

    it('should create a bunch of objects and their versions', done => {
        const vids = [];
        const keycount = 50;
        const versioncount = 20;
        const value = '{"foo":"bar"}';
        async.times(keycount, (i, next1) => {
            const key = `foo${i}`;
            const params = { Bucket: bucket, Key: key, Body: value };
            async.times(versioncount, (j, next2) =>
                s3.putObject(params, (err, data) => {
                    assert.strictEqual(err, null);
                    assert(data.VersionId, 'invalid versionId');
                    vids.push({ Key: key, VersionId: data.VersionId });
                    next2();
                }), next1);
        }, err => {
            assert.strictEqual(err, null);
            assert.strictEqual(vids.length, keycount * versioncount);
            // TODO use delete marker and check with the result
            process.stdout.write('creating objects done, now deleting...');
            done();
        });
    });
});
|
||||||
|
});
|
|
@ -0,0 +1,130 @@
|
||||||
|
import assert from 'assert';
|
||||||
|
import { S3 } from 'aws-sdk';
|
||||||
|
import async from 'async';
|
||||||
|
|
||||||
|
import getConfig from '../support/config';
|
||||||
|
|
||||||
|
const bucket = `versioning-bucket-${Date.now()}`;
|
||||||
|
|
||||||
|
/**
 * Comparator for version entries: orders by Key ascending, then by
 * VersionId ascending. Suitable for Array.prototype.sort.
 */
function comp(v1, v2) {
    if (v1.Key !== v2.Key) {
        return v1.Key > v2.Key ? 1 : -1;
    }
    if (v1.VersionId !== v2.VersionId) {
        return v1.VersionId > v2.VersionId ? 1 : -1;
    }
    return 0;
}
|
||||||
|
|
||||||
|
const testing = process.env.VERSIONING === 'no' ? describe.skip : describe;
|
||||||
|
|
||||||
|
testing('aws-node-sdk test bucket versioning listing', function testSuite() {
    this.timeout(600000);
    let s3 = undefined;
    // Keys written by the creation test (mutated into sorted order by the
    // first listing test — the delete-marker test depends on that order).
    const masterVersions = [];
    // Every { Key, VersionId } pair created (puts and delete markers).
    const allVersions = [];

    // setup test
    before(done => {
        const config = getConfig('default', { signatureVersion: 'v4' });
        s3 = new S3(config);
        s3.createBucket({ Bucket: bucket }, done);
    });

    // delete bucket after testing
    after(done => s3.deleteBucket({ Bucket: bucket }, done));

    it('should accept valid versioning configuration', done => {
        const params = {
            Bucket: bucket,
            VersioningConfiguration: {
                Status: 'Enabled',
            },
        };
        s3.putBucketVersioning(params, done);
    });

    it('should create a bunch of objects and their versions', done => {
        const keycount = 20;
        const versioncount = 20;
        const value = '{"foo":"bar"}';
        async.times(keycount, (i, next1) => {
            const key = `foo${i}`;
            masterVersions.push(key);
            const params = { Bucket: bucket, Key: key, Body: value };
            async.times(versioncount, (j, next2) =>
                s3.putObject(params, (err, data) => {
                    assert.strictEqual(err, null);
                    assert(data.VersionId, 'invalid versionId');
                    allVersions.push({ Key: key,
                        VersionId: data.VersionId });
                    next2();
                }), next1);
        }, err => {
            assert.strictEqual(err, null);
            assert.strictEqual(allVersions.length, keycount * versioncount);
            done();
        });
    });

    it('should list all latest versions', done => {
        const params = { Bucket: bucket, MaxKeys: 1000, Delimiter: '/' };
        s3.listObjects(params, (err, data) => {
            // BUGFIX: the original ignored err and read data directly.
            assert.strictEqual(err, null);
            const keys = data.Contents.map(entry => entry.Key);
            // NOTE: .sort() mutates masterVersions in place; the
            // delete-marker test below relies on this sorted order.
            assert.deepStrictEqual(keys.sort(), masterVersions.sort(),
                'not same keys');
            done();
        });
    });

    it('should create some delete markers', done => {
        const keycount = 15;
        async.times(keycount, (i, next) => {
            const key = masterVersions[i];
            const params = { Bucket: bucket, Key: key };
            s3.deleteObject(params, (err, data) => {
                assert.strictEqual(err, null);
                assert(data.VersionId, 'invalid versionId');
                allVersions.push({ Key: key, VersionId: data.VersionId });
                next();
            });
        }, done);
    });

    it('should list all latest versions', done => {
        const params = { Bucket: bucket, MaxKeys: 1000, Delimiter: '/' };
        s3.listObjects(params, (err, data) => {
            // BUGFIX: the original ignored err and read data directly.
            assert.strictEqual(err, null);
            const keys = data.Contents.map(entry => entry.Key);
            // The 15 keys hidden by delete markers must no longer appear.
            assert.deepStrictEqual(keys.sort(),
                masterVersions.sort().slice(15), 'not same keys');
            done();
        });
    });

    it('should list all versions', done => {
        const versions = [];
        const params = { Bucket: bucket, MaxKeys: 15, Delimiter: '/' };
        // Pagination via async.retry: a truncated page "fails" the task so
        // retry reruns it with the updated markers until all pages are in.
        // BUGFIX: renamed the task callback to avoid shadowing the outer
        // mocha `done`, and stopped ignoring the list error.
        async.retry(100, next => s3.listObjectVersions(params,
            (err, data) => {
                assert.strictEqual(err, null);
                data.Versions.forEach(version => versions.push({
                    Key: version.Key, VersionId: version.VersionId }));
                data.DeleteMarkers.forEach(version => versions.push({
                    Key: version.Key, VersionId: version.VersionId }));
                if (data.IsTruncated) {
                    params.KeyMarker = data.NextKeyMarker;
                    params.VersionIdMarker = data.NextVersionIdMarker;
                    return next('not done yet');
                }
                return next();
            }), err => {
            // BUGFIX: surface an exhausted-retry error instead of
            // silently comparing a partial listing.
            assert.ifError(err);
            assert.deepStrictEqual(versions.sort(comp),
                allVersions.sort(comp), 'not same versions');
            const params = { Bucket: bucket,
                Delete: { Objects: allVersions } };
            s3.deleteObjects(params, done);
        });
    });
});
|
|
@ -0,0 +1,341 @@
|
||||||
|
import assert from 'assert';
|
||||||
|
import { S3 } from 'aws-sdk';
|
||||||
|
import async from 'async';
|
||||||
|
|
||||||
|
import getConfig from '../support/config';
|
||||||
|
|
||||||
|
const bucket = `versioning-bucket-${Date.now()}`;
|
||||||
|
|
||||||
|
const testing = process.env.VERSIONING === 'no' ?
|
||||||
|
describe.skip : describe;
|
||||||
|
|
||||||
|
testing('aws-node-sdk test bucket versioning', function testSuite() {
    this.timeout(600000);
    let s3 = undefined;
    // Version ids created across tests; consumed (in reverse) by the
    // 'delete latest version' test.
    const versionIds = [];
    const counter = 100;

    // setup test
    before(done => {
        const config = getConfig('default', { signatureVersion: 'v4' });
        s3 = new S3(config);
        s3.createBucket({ Bucket: bucket }, done);
    });

    // delete bucket after testing
    after(done => s3.deleteBucket({ Bucket: bucket }, done));

    it('should not accept empty versioning configuration', done => {
        const params = {
            Bucket: bucket,
            VersioningConfiguration: {},
        };
        s3.putBucketVersioning(params, error => {
            if (error) {
                assert.strictEqual(error.statusCode, 400);
                assert.strictEqual(
                    error.code, 'IllegalVersioningConfigurationException');
                done();
            } else {
                done('accepted empty versioning configuration');
            }
        });
    });

    it('should retrieve an empty versioning configuration', done => {
        const params = { Bucket: bucket };
        s3.getBucketVersioning(params, (error, data) => {
            assert.strictEqual(error, null);
            assert.deepStrictEqual(data, {});
            done();
        });
    });

    it('should not accept versioning configuration w/o \"Status\"', done => {
        const params = {
            Bucket: bucket,
            VersioningConfiguration: {
                MFADelete: 'Enabled',
            },
        };
        s3.putBucketVersioning(params, error => {
            if (error) {
                assert.strictEqual(error.statusCode, 400);
                assert.strictEqual(
                    error.code, 'IllegalVersioningConfigurationException');
                done();
            } else {
                done('accepted empty versioning configuration');
            }
        });
    });

    it('should retrieve an empty versioning configuration', done => {
        const params = { Bucket: bucket };
        s3.getBucketVersioning(params, (error, data) => {
            assert.strictEqual(error, null);
            assert.deepStrictEqual(data, {});
            done();
        });
    });

    it('should not accept versioning configuration w/ invalid value',
    done => {
        const params = {
            Bucket: bucket,
            VersioningConfiguration: {
                MFADelete: 'fun',
                Status: 'let\'s do it',
            },
        };
        s3.putBucketVersioning(params, error => {
            if (error) {
                assert.strictEqual(error.statusCode, 400);
                assert.strictEqual(
                    error.code, 'IllegalVersioningConfigurationException');
                done();
            } else {
                done('accepted empty versioning configuration');
            }
        });
    });

    it('should retrieve an empty versioning configuration', done => {
        const params = { Bucket: bucket };
        s3.getBucketVersioning(params, (error, data) => {
            assert.strictEqual(error, null);
            assert.deepStrictEqual(data, {});
            done();
        });
    });

    it('should create a non-versioned object', done => {
        const params = { Bucket: bucket, Key: '/' };
        s3.putObject(params, err => {
            assert.strictEqual(err, null);
            s3.getObject(params, err => {
                assert.strictEqual(err, null);
                done();
            });
        });
    });

    it('should accept valid versioning configuration', done => {
        const params = {
            Bucket: bucket,
            VersioningConfiguration: {
                Status: 'Enabled',
            },
        };
        s3.putBucketVersioning(params, done);
    });

    it('should retrieve the valid versioning configuration', done => {
        const params = { Bucket: bucket };
        s3.getBucketVersioning(params, (error, data) => {
            assert.strictEqual(error, null);
            assert.deepStrictEqual(data, { Status: 'Enabled' });
            done();
        });
    });

    it('should create a new version for an object', done => {
        const params = { Bucket: bucket, Key: '/' };
        s3.putObject(params, (err, data) => {
            assert.strictEqual(err, null);
            params.VersionId = data.VersionId;
            versionIds.push(data.VersionId);
            s3.getObject(params, (err, data) => {
                assert.strictEqual(err, null);
                assert.strictEqual(params.VersionId, data.VersionId,
                    'version ids are not equal');
                // TODO compare the value of null version and the original
                // version when find out how to include value in the put
                params.VersionId = 'null';
                s3.getObject(params, done);
            });
        });
    });

    it('should create new versions but still keep nullVersionId',
    done => {
        const params = { Bucket: bucket, Key: '/' };
        const paramsNull = { Bucket: bucket, Key: '/', VersionId: 'null' };
        let nullVersionId = undefined;
        // create new versions
        async.timesSeries(counter, (i, next) => s3.putObject(params,
            (err, data) => {
                // BUGFIX: the original ignored err and read data directly.
                assert.strictEqual(err, null);
                versionIds.push(data.VersionId);
                // get the 'null' version
                s3.getObject(paramsNull, (err, data) => {
                    assert.strictEqual(err, null);
                    if (nullVersionId === undefined) {
                        nullVersionId = data.VersionId;
                    }
                    // what to expect: nullVersionId should be the same
                    assert(nullVersionId, 'nullVersionId should be valid');
                    assert.strictEqual(nullVersionId, data.VersionId);
                    next(err);
                });
            }), done);
    });

    it('should accept valid versioning configuration', done => {
        const params = {
            Bucket: bucket,
            VersioningConfiguration: {
                Status: 'Suspended',
            },
        };
        s3.putBucketVersioning(params, done);
    });

    it('should retrieve the valid versioning configuration', done => {
        const params = { Bucket: bucket };
        s3.getBucketVersioning(params, (error, data) => {
            assert.strictEqual(error, null);
            assert.deepStrictEqual(data, { Status: 'Suspended' });
            done();
        });
    });

    it('should update null version in versioning suspended bucket',
    done => {
        const params = { Bucket: bucket, Key: '/' };
        const paramsNull = { Bucket: bucket, Key: '/', VersionId: 'null' };
        async.waterfall([
            callback => s3.getObject(paramsNull, err => {
                assert.strictEqual(err, null);
                callback();
            }),
            callback => s3.putObject(params, err => {
                assert.strictEqual(err, null);
                versionIds.push('null');
                callback();
            }),
            callback => s3.getObject(paramsNull, (err, data) => {
                assert.strictEqual(err, null);
                assert.strictEqual(data.VersionId, 'null',
                    'version ids are equal');
                callback();
            }),
            callback => s3.getObject(params, (err, data) => {
                assert.strictEqual(err, null);
                assert.strictEqual(data.VersionId, 'null',
                    'version ids are not equal');
                callback();
            }),
        ], done);
    });

    it('should enable versioning and preserve the null version', done => {
        const paramsVersioning = {
            Bucket: bucket,
            VersioningConfiguration: {
                Status: 'Enabled',
            },
        };
        const params = { Bucket: bucket, Key: '/' };
        const paramsNull = { Bucket: bucket, Key: '/', VersionId: 'null' };
        let nullVersionId = undefined;
        async.waterfall([
            callback => s3.getObject(paramsNull, (err, data) => {
                assert.strictEqual(err, null);
                nullVersionId = data.VersionId;
                callback();
            }),
            callback => s3.putBucketVersioning(paramsVersioning,
                err => callback(err)),
            callback => async.timesSeries(counter, (i, next) =>
                s3.putObject(params, (err, data) => {
                    assert.strictEqual(err, null);
                    versionIds.push(data.VersionId);
                    next();
                }), err => callback(err)),
            callback => s3.getObject(paramsNull, (err, data) => {
                assert.strictEqual(err, null);
                assert.strictEqual(nullVersionId, data.VersionId,
                    'version ids are not equal');
                callback();
            }),
        ], done);
    });

    it('should create delete marker and keep the null version', done => {
        const params = { Bucket: bucket, Key: '/' };
        const paramsNull = { Bucket: bucket, Key: '/', VersionId: 'null' };
        s3.getObject(paramsNull, (err, data) => {
            assert.strictEqual(err, null);
            const nullVersionId = data.VersionId;
            async.timesSeries(counter, (i, next) => s3.deleteObject(params,
                (err, data) => {
                    assert.strictEqual(err, null);
                    versionIds.push(data.VersionId);
                    s3.getObject(params, err => {
                        assert.strictEqual(err.code, 'NoSuchKey');
                        next();
                    });
                }), err => {
                assert.strictEqual(err, null);
                s3.getObject(paramsNull, (err, data) => {
                    // BUGFIX: the original ignored err here.
                    assert.strictEqual(err, null);
                    assert.strictEqual(nullVersionId, data.VersionId,
                        'version ids are not equal');
                    done();
                });
            });
        });
    });

    it('should delete latest version and get the next version', done => {
        versionIds.reverse();
        const params = { Bucket: bucket, Key: '/' };
        async.timesSeries(versionIds.length, (i, next) => {
            const versionId = versionIds[i];
            // BUGFIX: the original condition `i < versionIds.length` is
            // always true inside timesSeries; use length - 1 so the last
            // iteration explicitly has no successor.
            const nextVersionId = i < versionIds.length - 1 ?
                versionIds[i + 1] : undefined;
            const paramsVersion =
                { Bucket: bucket, Key: '/', VersionId: versionId };
            s3.deleteObject(paramsVersion, err => {
                assert.strictEqual(err, null);
                s3.getObject(params, (err, data) => {
                    if (err) {
                        assert(err.code === 'NotFound' ||
                            err.code === 'NoSuchKey', 'error');
                    } else {
                        assert(data.VersionId, 'invalid versionId');
                        if (nextVersionId !== 'null') {
                            assert.strictEqual(data.VersionId,
                                nextVersionId);
                        }
                    }
                    next();
                });
            });
        }, done);
    });

    it('should create a bunch of objects and their versions', done => {
        const vids = [];
        const keycount = 50;
        const versioncount = 20;
        const value = '{"foo":"bar"}';
        async.times(keycount, (i, next1) => {
            const key = `foo${i}`;
            const params = { Bucket: bucket, Key: key, Body: value };
            async.times(versioncount, (j, next2) =>
                s3.putObject(params, (err, data) => {
                    assert.strictEqual(err, null);
                    assert(data.VersionId, 'invalid versionId');
                    vids.push({ Key: key, VersionId: data.VersionId });
                    next2();
                }), next1);
        }, err => {
            assert.strictEqual(err, null);
            assert.strictEqual(vids.length, keycount * versioncount);
            const params = { Bucket: bucket, Delete: { Objects: vids } };
            // TODO use delete marker and check with the result
            process.stdout.write('creating objects done, now deleting...');
            s3.deleteObjects(params, done);
        });
    });
});
|
|
@ -8,7 +8,7 @@ const objectKey = 'key';
|
||||||
const itSkipIfAWS = process.env.AWS_ON_AIR ? it.skip : it;
|
const itSkipIfAWS = process.env.AWS_ON_AIR ? it.skip : it;
|
||||||
|
|
||||||
describe('unsupported query requests:', () => {
|
describe('unsupported query requests:', () => {
|
||||||
constants.unsupportedQueries.forEach(query => {
|
Object.keys(constants.unsupportedQueries).forEach(query => {
|
||||||
itSkipIfAWS(`should respond with NotImplemented for ?${query} request`,
|
itSkipIfAWS(`should respond with NotImplemented for ?${query} request`,
|
||||||
done => {
|
done => {
|
||||||
const queryObj = {};
|
const queryObj = {};
|
||||||
|
@ -24,7 +24,7 @@ describe('unsupported query requests:', () => {
|
||||||
|
|
||||||
itSkipIfAWS('should accept blacklisted query key as a query value ' +
|
itSkipIfAWS('should accept blacklisted query key as a query value ' +
|
||||||
'to a query key that is not on the blacklist', done => {
|
'to a query key that is not on the blacklist', done => {
|
||||||
const queryObj = { test: constants.unsupportedQueries[0] };
|
const queryObj = { test: Object.keys(constants.unsupportedQueries)[0] };
|
||||||
makeS3Request({ method: 'GET', queryObj, bucket, objectKey }, err => {
|
makeS3Request({ method: 'GET', queryObj, bucket, objectKey }, err => {
|
||||||
assert.strictEqual(err.code, 'NoSuchBucket');
|
assert.strictEqual(err.code, 'NoSuchBucket');
|
||||||
assert.strictEqual(err.statusCode, 404);
|
assert.strictEqual(err.statusCode, 404);
|
||||||
|
|
|
@ -11,7 +11,7 @@ import constants from '../../../constants';
|
||||||
import initiateMultipartUpload from '../../../lib/api/initiateMultipartUpload';
|
import initiateMultipartUpload from '../../../lib/api/initiateMultipartUpload';
|
||||||
import metadata from '../metadataswitch';
|
import metadata from '../metadataswitch';
|
||||||
import * as metadataMem from '../../../lib/metadata/in_memory/metadata';
|
import * as metadataMem from '../../../lib/metadata/in_memory/metadata';
|
||||||
import objectPut from '../../../lib/api/objectPut';
|
import { objectPut } from '../../../lib/api/objectPut';
|
||||||
import objectPutPart from '../../../lib/api/objectPutPart';
|
import objectPutPart from '../../../lib/api/objectPutPart';
|
||||||
import { cleanup, DummyRequestLogger, makeAuthInfo } from '../helpers';
|
import { cleanup, DummyRequestLogger, makeAuthInfo } from '../helpers';
|
||||||
import DummyRequest from '../DummyRequest';
|
import DummyRequest from '../DummyRequest';
|
||||||
|
|
|
@ -6,7 +6,7 @@ import { parseString } from 'xml2js';
|
||||||
|
|
||||||
import bucketGet from '../../../lib/api/bucketGet';
|
import bucketGet from '../../../lib/api/bucketGet';
|
||||||
import bucketPut from '../../../lib/api/bucketPut';
|
import bucketPut from '../../../lib/api/bucketPut';
|
||||||
import objectPut from '../../../lib/api/objectPut';
|
import { objectPut } from '../../../lib/api/objectPut';
|
||||||
import { cleanup, DummyRequestLogger, makeAuthInfo } from '../helpers';
|
import { cleanup, DummyRequestLogger, makeAuthInfo } from '../helpers';
|
||||||
import DummyRequest from '../DummyRequest';
|
import DummyRequest from '../DummyRequest';
|
||||||
|
|
||||||
|
|
|
@ -31,7 +31,7 @@ import objectDelete from '../../../lib/api/objectDelete';
|
||||||
import objectGet from '../../../lib/api/objectGet';
|
import objectGet from '../../../lib/api/objectGet';
|
||||||
import objectGetACL from '../../../lib/api/objectGetACL';
|
import objectGetACL from '../../../lib/api/objectGetACL';
|
||||||
import objectHead from '../../../lib/api/objectHead';
|
import objectHead from '../../../lib/api/objectHead';
|
||||||
import objectPut from '../../../lib/api/objectPut';
|
import { objectPut } from '../../../lib/api/objectPut';
|
||||||
import objectPutACL from '../../../lib/api/objectPutACL';
|
import objectPutACL from '../../../lib/api/objectPutACL';
|
||||||
import objectPutPart from '../../../lib/api/objectPutPart';
|
import objectPutPart from '../../../lib/api/objectPutPart';
|
||||||
import { DummyRequestLogger, makeAuthInfo } from '../helpers';
|
import { DummyRequestLogger, makeAuthInfo } from '../helpers';
|
||||||
|
|
|
@ -7,7 +7,7 @@ import { metadata } from '../../../lib/metadata/in_memory/metadata';
|
||||||
import { ds } from '../../../lib/data/in_memory/backend';
|
import { ds } from '../../../lib/data/in_memory/backend';
|
||||||
import DummyRequest from '../DummyRequest';
|
import DummyRequest from '../DummyRequest';
|
||||||
import bucketPut from '../../../lib/api/bucketPut';
|
import bucketPut from '../../../lib/api/bucketPut';
|
||||||
import objectPut from '../../../lib/api/objectPut';
|
import { objectPut } from '../../../lib/api/objectPut';
|
||||||
|
|
||||||
const log = new DummyRequestLogger();
|
const log = new DummyRequestLogger();
|
||||||
const canonicalID = 'accessKey1';
|
const canonicalID = 'accessKey1';
|
||||||
|
@ -29,6 +29,11 @@ const testBucketPutRequest = new DummyRequest({
|
||||||
describe('getObjMetadataAndDelete function for multiObjectDelete', () => {
|
describe('getObjMetadataAndDelete function for multiObjectDelete', () => {
|
||||||
let testPutObjectRequest1;
|
let testPutObjectRequest1;
|
||||||
let testPutObjectRequest2;
|
let testPutObjectRequest2;
|
||||||
|
const request = new DummyRequest({
|
||||||
|
headers: {},
|
||||||
|
parsedContentLength: contentLength,
|
||||||
|
}, postBody);
|
||||||
|
const bucket = { getVersioningConfiguration: () => null };
|
||||||
|
|
||||||
beforeEach(done => {
|
beforeEach(done => {
|
||||||
cleanup();
|
cleanup();
|
||||||
|
@ -66,8 +71,8 @@ describe('getObjMetadataAndDelete function for multiObjectDelete', () => {
|
||||||
|
|
||||||
it('should successfully get object metadata and then ' +
|
it('should successfully get object metadata and then ' +
|
||||||
'delete metadata and data', done => {
|
'delete metadata and data', done => {
|
||||||
getObjMetadataAndDelete(bucketName, true,
|
getObjMetadataAndDelete(authInfo, 'foo', request, bucketName, bucket,
|
||||||
[], [objectKey1, objectKey2], log,
|
true, [], [{ key: objectKey1 }, { key: objectKey2 }], log,
|
||||||
(err, quietSetting, errorResults, numOfObjects,
|
(err, quietSetting, errorResults, numOfObjects,
|
||||||
successfullyDeleted, totalContentLengthDeleted) => {
|
successfullyDeleted, totalContentLengthDeleted) => {
|
||||||
assert.ifError(err);
|
assert.ifError(err);
|
||||||
|
@ -90,8 +95,8 @@ describe('getObjMetadataAndDelete function for multiObjectDelete', () => {
|
||||||
});
|
});
|
||||||
|
|
||||||
it('should return success results if no such key', done => {
|
it('should return success results if no such key', done => {
|
||||||
getObjMetadataAndDelete(bucketName, true,
|
getObjMetadataAndDelete(authInfo, 'foo', request, bucketName, bucket,
|
||||||
[], ['madeup1', 'madeup2'], log,
|
true, [], [{ key: 'madeup1' }, { key: 'madeup2' }], log,
|
||||||
(err, quietSetting, errorResults, numOfObjects,
|
(err, quietSetting, errorResults, numOfObjects,
|
||||||
successfullyDeleted, totalContentLengthDeleted) => {
|
successfullyDeleted, totalContentLengthDeleted) => {
|
||||||
assert.ifError(err);
|
assert.ifError(err);
|
||||||
|
@ -114,19 +119,19 @@ describe('getObjMetadataAndDelete function for multiObjectDelete', () => {
|
||||||
// even though the getObjMetadataAndDelete function would
|
// even though the getObjMetadataAndDelete function would
|
||||||
// never be called if there was no bucket (would error out earlier
|
// never be called if there was no bucket (would error out earlier
|
||||||
// in API)
|
// in API)
|
||||||
getObjMetadataAndDelete('madeupbucket', true,
|
getObjMetadataAndDelete(authInfo, 'foo', request, 'madeupbucket',
|
||||||
[], [objectKey1, objectKey2], log,
|
bucket, true, [], [{ key: objectKey1 }, { key: objectKey2 }], log,
|
||||||
(err, quietSetting, errorResults, numOfObjects,
|
(err, quietSetting, errorResults, numOfObjects,
|
||||||
successfullyDeleted, totalContentLengthDeleted) => {
|
successfullyDeleted, totalContentLengthDeleted) => {
|
||||||
assert.ifError(err);
|
assert.ifError(err);
|
||||||
assert.strictEqual(quietSetting, true);
|
assert.strictEqual(quietSetting, true);
|
||||||
assert.deepStrictEqual(errorResults, [
|
assert.deepStrictEqual(errorResults, [
|
||||||
{
|
{
|
||||||
key: objectKey1,
|
entry: { key: objectKey1 },
|
||||||
error: errors.NoSuchBucket,
|
error: errors.NoSuchBucket,
|
||||||
},
|
},
|
||||||
{
|
{
|
||||||
key: objectKey2,
|
entry: { key: objectKey2 },
|
||||||
error: errors.NoSuchBucket,
|
error: errors.NoSuchBucket,
|
||||||
},
|
},
|
||||||
]);
|
]);
|
||||||
|
@ -142,8 +147,8 @@ describe('getObjMetadataAndDelete function for multiObjectDelete', () => {
|
||||||
|
|
||||||
it('should return no error or success results if no objects in play',
|
it('should return no error or success results if no objects in play',
|
||||||
done => {
|
done => {
|
||||||
getObjMetadataAndDelete(bucketName, true,
|
getObjMetadataAndDelete(authInfo, 'foo', request, bucketName,
|
||||||
[], [], log,
|
bucket, true, [], [], log,
|
||||||
(err, quietSetting, errorResults, numOfObjects,
|
(err, quietSetting, errorResults, numOfObjects,
|
||||||
successfullyDeleted, totalContentLengthDeleted) => {
|
successfullyDeleted, totalContentLengthDeleted) => {
|
||||||
assert.ifError(err);
|
assert.ifError(err);
|
||||||
|
@ -167,8 +172,9 @@ describe('getObjMetadataAndDelete function for multiObjectDelete', () => {
|
||||||
error: errors.AccessDenied,
|
error: errors.AccessDenied,
|
||||||
},
|
},
|
||||||
];
|
];
|
||||||
getObjMetadataAndDelete(bucketName, true,
|
getObjMetadataAndDelete(authInfo, 'foo', request, bucketName, bucket,
|
||||||
errorResultsSample, [objectKey1, objectKey2], log,
|
true, errorResultsSample,
|
||||||
|
[{ key: objectKey1 }, { key: objectKey2 }], log,
|
||||||
(err, quietSetting, errorResults, numOfObjects,
|
(err, quietSetting, errorResults, numOfObjects,
|
||||||
successfullyDeleted, totalContentLengthDeleted) => {
|
successfullyDeleted, totalContentLengthDeleted) => {
|
||||||
assert.ifError(err);
|
assert.ifError(err);
|
||||||
|
|
|
@ -5,7 +5,7 @@ import bucketPut from '../../../lib/api/bucketPut';
|
||||||
import bucketPutACL from '../../../lib/api/bucketPutACL';
|
import bucketPutACL from '../../../lib/api/bucketPutACL';
|
||||||
import constants from '../../../constants';
|
import constants from '../../../constants';
|
||||||
import { cleanup, DummyRequestLogger, makeAuthInfo } from '../helpers';
|
import { cleanup, DummyRequestLogger, makeAuthInfo } from '../helpers';
|
||||||
import objectPut from '../../../lib/api/objectPut';
|
import { objectPut } from '../../../lib/api/objectPut';
|
||||||
import objectDelete from '../../../lib/api/objectDelete';
|
import objectDelete from '../../../lib/api/objectDelete';
|
||||||
import objectGet from '../../../lib/api/objectGet';
|
import objectGet from '../../../lib/api/objectGet';
|
||||||
import DummyRequest from '../DummyRequest';
|
import DummyRequest from '../DummyRequest';
|
||||||
|
|
|
@ -8,7 +8,7 @@ import { cleanup, DummyRequestLogger, makeAuthInfo } from '../helpers';
|
||||||
import completeMultipartUpload from '../../../lib/api/completeMultipartUpload';
|
import completeMultipartUpload from '../../../lib/api/completeMultipartUpload';
|
||||||
import DummyRequest from '../DummyRequest';
|
import DummyRequest from '../DummyRequest';
|
||||||
import initiateMultipartUpload from '../../../lib/api/initiateMultipartUpload';
|
import initiateMultipartUpload from '../../../lib/api/initiateMultipartUpload';
|
||||||
import objectPut from '../../../lib/api/objectPut';
|
import { objectPut } from '../../../lib/api/objectPut';
|
||||||
import objectGet from '../../../lib/api/objectGet';
|
import objectGet from '../../../lib/api/objectGet';
|
||||||
import objectPutPart from '../../../lib/api/objectPutPart';
|
import objectPutPart from '../../../lib/api/objectPutPart';
|
||||||
|
|
||||||
|
@ -61,7 +61,7 @@ describe('objectGet API', () => {
|
||||||
log, () => {
|
log, () => {
|
||||||
objectPut(authInfo, testPutObjectRequest, undefined,
|
objectPut(authInfo, testPutObjectRequest, undefined,
|
||||||
log, (err, result) => {
|
log, (err, result) => {
|
||||||
assert.strictEqual(result, correctMD5);
|
assert.strictEqual(result.contentMD5, correctMD5);
|
||||||
objectGet(authInfo, testGetRequest,
|
objectGet(authInfo, testGetRequest,
|
||||||
log, (err, result, responseMetaHeaders) => {
|
log, (err, result, responseMetaHeaders) => {
|
||||||
assert.strictEqual(responseMetaHeaders
|
assert.strictEqual(responseMetaHeaders
|
||||||
|
@ -80,7 +80,7 @@ describe('objectGet API', () => {
|
||||||
log, () => {
|
log, () => {
|
||||||
objectPut(authInfo, testPutObjectRequest, undefined, log,
|
objectPut(authInfo, testPutObjectRequest, undefined, log,
|
||||||
(err, result) => {
|
(err, result) => {
|
||||||
assert.strictEqual(result, correctMD5);
|
assert.strictEqual(result.contentMD5, correctMD5);
|
||||||
objectGet(authInfo, testGetRequest, log,
|
objectGet(authInfo, testGetRequest, log,
|
||||||
(err, dataGetInfo) => {
|
(err, dataGetInfo) => {
|
||||||
assert.deepStrictEqual(dataGetInfo,
|
assert.deepStrictEqual(dataGetInfo,
|
||||||
|
@ -226,7 +226,7 @@ describe('objectGet API', () => {
|
||||||
log, () => {
|
log, () => {
|
||||||
objectPut(authInfo, testPutObjectRequest, undefined, log,
|
objectPut(authInfo, testPutObjectRequest, undefined, log,
|
||||||
(err, result) => {
|
(err, result) => {
|
||||||
assert.strictEqual(result, correctMD5);
|
assert.strictEqual(result.contentMD5, correctMD5);
|
||||||
objectGet(authInfo, testGetRequest,
|
objectGet(authInfo, testGetRequest,
|
||||||
log, (err, result, responseMetaHeaders) => {
|
log, (err, result, responseMetaHeaders) => {
|
||||||
assert.strictEqual(result, null);
|
assert.strictEqual(result, null);
|
||||||
|
|
|
@ -7,7 +7,7 @@ import { parseString } from 'xml2js';
|
||||||
import bucketPut from '../../../lib/api/bucketPut';
|
import bucketPut from '../../../lib/api/bucketPut';
|
||||||
import constants from '../../../constants';
|
import constants from '../../../constants';
|
||||||
import { cleanup, DummyRequestLogger, makeAuthInfo } from '../helpers';
|
import { cleanup, DummyRequestLogger, makeAuthInfo } from '../helpers';
|
||||||
import objectPut from '../../../lib/api/objectPut';
|
import { objectPut } from '../../../lib/api/objectPut';
|
||||||
import objectGetACL from '../../../lib/api/objectGetACL';
|
import objectGetACL from '../../../lib/api/objectGetACL';
|
||||||
import DummyRequest from '../DummyRequest';
|
import DummyRequest from '../DummyRequest';
|
||||||
|
|
||||||
|
@ -63,7 +63,7 @@ describe('objectGetACL API', () => {
|
||||||
(corsHeaders, next) => objectPut(authInfo, testPutObjectRequest,
|
(corsHeaders, next) => objectPut(authInfo, testPutObjectRequest,
|
||||||
undefined, log, next),
|
undefined, log, next),
|
||||||
(result, corsHeaders, next) => {
|
(result, corsHeaders, next) => {
|
||||||
assert.strictEqual(result, correctMD5);
|
assert.strictEqual(result.contentMD5, correctMD5);
|
||||||
objectGetACL(authInfo, testGetACLRequest, log, next);
|
objectGetACL(authInfo, testGetACLRequest, log, next);
|
||||||
},
|
},
|
||||||
(result, corsHeaders, next) => parseString(result, next),
|
(result, corsHeaders, next) => parseString(result, next),
|
||||||
|
@ -104,7 +104,7 @@ describe('objectGetACL API', () => {
|
||||||
(corsHeaders, next) => objectPut(authInfo, testPutObjectRequest,
|
(corsHeaders, next) => objectPut(authInfo, testPutObjectRequest,
|
||||||
undefined, log, next),
|
undefined, log, next),
|
||||||
(result, corsHeaders, next) => {
|
(result, corsHeaders, next) => {
|
||||||
assert.strictEqual(result, correctMD5);
|
assert.strictEqual(result.contentMD5, correctMD5);
|
||||||
objectGetACL(authInfo, testGetACLRequest, log, next);
|
objectGetACL(authInfo, testGetACLRequest, log, next);
|
||||||
},
|
},
|
||||||
(result, corsHeaders, next) => parseString(result, next),
|
(result, corsHeaders, next) => parseString(result, next),
|
||||||
|
@ -141,7 +141,7 @@ describe('objectGetACL API', () => {
|
||||||
(corsHeaders, next) => objectPut(authInfo, testPutObjectRequest,
|
(corsHeaders, next) => objectPut(authInfo, testPutObjectRequest,
|
||||||
undefined, log, next),
|
undefined, log, next),
|
||||||
(result, corsHeaders, next) => {
|
(result, corsHeaders, next) => {
|
||||||
assert.strictEqual(result, correctMD5);
|
assert.strictEqual(result.contentMD5, correctMD5);
|
||||||
objectGetACL(authInfo, testGetACLRequest, log, next);
|
objectGetACL(authInfo, testGetACLRequest, log, next);
|
||||||
},
|
},
|
||||||
(result, corsHeaders, next) => parseString(result, next),
|
(result, corsHeaders, next) => parseString(result, next),
|
||||||
|
@ -185,7 +185,7 @@ describe('objectGetACL API', () => {
|
||||||
(corsHeaders, next) => objectPut(authInfo, testPutObjectRequest,
|
(corsHeaders, next) => objectPut(authInfo, testPutObjectRequest,
|
||||||
undefined, log, next),
|
undefined, log, next),
|
||||||
(result, corsHeaders, next) => {
|
(result, corsHeaders, next) => {
|
||||||
assert.strictEqual(result, correctMD5);
|
assert.strictEqual(result.contentMD5, correctMD5);
|
||||||
objectGetACL(authInfo, testGetACLRequest, log, next);
|
objectGetACL(authInfo, testGetACLRequest, log, next);
|
||||||
},
|
},
|
||||||
(result, corsHeaders, next) => parseString(result, next),
|
(result, corsHeaders, next) => parseString(result, next),
|
||||||
|
@ -226,7 +226,7 @@ describe('objectGetACL API', () => {
|
||||||
(corsHeaders, next) => objectPut(
|
(corsHeaders, next) => objectPut(
|
||||||
authInfo, testPutObjectRequest, undefined, log, next),
|
authInfo, testPutObjectRequest, undefined, log, next),
|
||||||
(result, corsHeaders, next) => {
|
(result, corsHeaders, next) => {
|
||||||
assert.strictEqual(result, correctMD5);
|
assert.strictEqual(result.contentMD5, correctMD5);
|
||||||
objectGetACL(authInfo, testGetACLRequest, log, next);
|
objectGetACL(authInfo, testGetACLRequest, log, next);
|
||||||
},
|
},
|
||||||
(result, corsHeaders, next) => parseString(result, next),
|
(result, corsHeaders, next) => parseString(result, next),
|
||||||
|
@ -266,7 +266,7 @@ describe('objectGetACL API', () => {
|
||||||
(corsHeaders, next) => objectPut(authInfo, testPutObjectRequest,
|
(corsHeaders, next) => objectPut(authInfo, testPutObjectRequest,
|
||||||
undefined, log, next),
|
undefined, log, next),
|
||||||
(result, corsHeaders, next) => {
|
(result, corsHeaders, next) => {
|
||||||
assert.strictEqual(result, correctMD5);
|
assert.strictEqual(result.contentMD5, correctMD5);
|
||||||
objectGetACL(authInfo, testGetACLRequest, log, next);
|
objectGetACL(authInfo, testGetACLRequest, log, next);
|
||||||
},
|
},
|
||||||
(result, corsHeaders, next) => parseString(result, next),
|
(result, corsHeaders, next) => parseString(result, next),
|
||||||
|
@ -316,7 +316,7 @@ describe('objectGetACL API', () => {
|
||||||
(corsHeaders, next) => objectPut(authInfo, testPutObjectRequest,
|
(corsHeaders, next) => objectPut(authInfo, testPutObjectRequest,
|
||||||
undefined, log, next),
|
undefined, log, next),
|
||||||
(result, corsHeaders, next) => {
|
(result, corsHeaders, next) => {
|
||||||
assert.strictEqual(result, correctMD5);
|
assert.strictEqual(result.contentMD5, correctMD5);
|
||||||
objectGetACL(authInfo, testGetACLRequest, log, next);
|
objectGetACL(authInfo, testGetACLRequest, log, next);
|
||||||
},
|
},
|
||||||
(result, corsHeaders, next) => parseString(result, next),
|
(result, corsHeaders, next) => parseString(result, next),
|
||||||
|
|
|
@ -3,7 +3,7 @@ import assert from 'assert';
|
||||||
|
|
||||||
import bucketPut from '../../../lib/api/bucketPut';
|
import bucketPut from '../../../lib/api/bucketPut';
|
||||||
import { cleanup, DummyRequestLogger, makeAuthInfo } from '../helpers';
|
import { cleanup, DummyRequestLogger, makeAuthInfo } from '../helpers';
|
||||||
import objectPut from '../../../lib/api/objectPut';
|
import { objectPut } from '../../../lib/api/objectPut';
|
||||||
import objectHead from '../../../lib/api/objectHead';
|
import objectHead from '../../../lib/api/objectHead';
|
||||||
import DummyRequest from '../DummyRequest';
|
import DummyRequest from '../DummyRequest';
|
||||||
|
|
||||||
|
@ -59,7 +59,7 @@ describe('objectHead API', () => {
|
||||||
log, () => {
|
log, () => {
|
||||||
objectPut(authInfo, testPutObjectRequest, undefined, log,
|
objectPut(authInfo, testPutObjectRequest, undefined, log,
|
||||||
(err, result) => {
|
(err, result) => {
|
||||||
assert.strictEqual(result, correctMD5);
|
assert.strictEqual(result.contentMD5, correctMD5);
|
||||||
objectHead(authInfo, testGetRequest, log, err => {
|
objectHead(authInfo, testGetRequest, log, err => {
|
||||||
assert.deepStrictEqual(err, errors.NotModified);
|
assert.deepStrictEqual(err, errors.NotModified);
|
||||||
done();
|
done();
|
||||||
|
@ -82,7 +82,7 @@ describe('objectHead API', () => {
|
||||||
log, () => {
|
log, () => {
|
||||||
objectPut(authInfo, testPutObjectRequest, undefined, log,
|
objectPut(authInfo, testPutObjectRequest, undefined, log,
|
||||||
(err, result) => {
|
(err, result) => {
|
||||||
assert.strictEqual(result, correctMD5);
|
assert.strictEqual(result.contentMD5, correctMD5);
|
||||||
objectHead(authInfo, testGetRequest, log, err => {
|
objectHead(authInfo, testGetRequest, log, err => {
|
||||||
assert.deepStrictEqual(err,
|
assert.deepStrictEqual(err,
|
||||||
errors.PreconditionFailed);
|
errors.PreconditionFailed);
|
||||||
|
@ -107,7 +107,7 @@ describe('objectHead API', () => {
|
||||||
log, () => {
|
log, () => {
|
||||||
objectPut(authInfo, testPutObjectRequest, undefined, log,
|
objectPut(authInfo, testPutObjectRequest, undefined, log,
|
||||||
(err, result) => {
|
(err, result) => {
|
||||||
assert.strictEqual(result, correctMD5);
|
assert.strictEqual(result.contentMD5, correctMD5);
|
||||||
objectHead(authInfo, testGetRequest, log, err => {
|
objectHead(authInfo, testGetRequest, log, err => {
|
||||||
assert.deepStrictEqual(err,
|
assert.deepStrictEqual(err,
|
||||||
errors.PreconditionFailed);
|
errors.PreconditionFailed);
|
||||||
|
@ -132,7 +132,7 @@ describe('objectHead API', () => {
|
||||||
log, () => {
|
log, () => {
|
||||||
objectPut(authInfo, testPutObjectRequest, undefined, log,
|
objectPut(authInfo, testPutObjectRequest, undefined, log,
|
||||||
(err, result) => {
|
(err, result) => {
|
||||||
assert.strictEqual(result, correctMD5);
|
assert.strictEqual(result.contentMD5, correctMD5);
|
||||||
objectHead(authInfo, testGetRequest, log, err => {
|
objectHead(authInfo, testGetRequest, log, err => {
|
||||||
assert.deepStrictEqual(err, errors.NotModified);
|
assert.deepStrictEqual(err, errors.NotModified);
|
||||||
done();
|
done();
|
||||||
|
@ -154,7 +154,7 @@ describe('objectHead API', () => {
|
||||||
locationConstraint, log, () => {
|
locationConstraint, log, () => {
|
||||||
objectPut(authInfo, testPutObjectRequest, undefined, log,
|
objectPut(authInfo, testPutObjectRequest, undefined, log,
|
||||||
(err, result) => {
|
(err, result) => {
|
||||||
assert.strictEqual(result, correctMD5);
|
assert.strictEqual(result.contentMD5, correctMD5);
|
||||||
objectHead(authInfo, testGetRequest, log,
|
objectHead(authInfo, testGetRequest, log,
|
||||||
(err, success) => {
|
(err, success) => {
|
||||||
assert.strictEqual(success[userMetadataKey],
|
assert.strictEqual(success[userMetadataKey],
|
||||||
|
|
|
@ -6,7 +6,7 @@ import bucketPutACL from '../../../lib/api/bucketPutACL';
|
||||||
import { cleanup, DummyRequestLogger, makeAuthInfo } from '../helpers';
|
import { cleanup, DummyRequestLogger, makeAuthInfo } from '../helpers';
|
||||||
import { ds } from '../../../lib/data/in_memory/backend';
|
import { ds } from '../../../lib/data/in_memory/backend';
|
||||||
import metadata from '../metadataswitch';
|
import metadata from '../metadataswitch';
|
||||||
import objectPut from '../../../lib/api/objectPut';
|
import { objectPut } from '../../../lib/api/objectPut';
|
||||||
import DummyRequest from '../DummyRequest';
|
import DummyRequest from '../DummyRequest';
|
||||||
|
|
||||||
const log = new DummyRequestLogger();
|
const log = new DummyRequestLogger();
|
||||||
|
@ -35,7 +35,7 @@ function testAuth(bucketOwner, authUser, bucketPutReq, log, cb) {
|
||||||
objectPut(authUser, testPutObjectRequest, undefined,
|
objectPut(authUser, testPutObjectRequest, undefined,
|
||||||
log, (err, res) => {
|
log, (err, res) => {
|
||||||
assert.strictEqual(err, null);
|
assert.strictEqual(err, null);
|
||||||
assert.strictEqual(res, correctMD5);
|
assert.strictEqual(res.contentMD5, correctMD5);
|
||||||
cb();
|
cb();
|
||||||
});
|
});
|
||||||
});
|
});
|
||||||
|
@ -112,7 +112,7 @@ describe('objectPut API', () => {
|
||||||
log, () => {
|
log, () => {
|
||||||
objectPut(authInfo, testPutObjectRequest, undefined, log,
|
objectPut(authInfo, testPutObjectRequest, undefined, log,
|
||||||
(err, result) => {
|
(err, result) => {
|
||||||
assert.strictEqual(result, correctMD5);
|
assert.strictEqual(result.contentMD5, correctMD5);
|
||||||
metadata.getObjectMD(bucketName, objectName,
|
metadata.getObjectMD(bucketName, objectName,
|
||||||
{}, log, (err, md) => {
|
{}, log, (err, md) => {
|
||||||
assert(md);
|
assert(md);
|
||||||
|
@ -147,7 +147,7 @@ describe('objectPut API', () => {
|
||||||
log, () => {
|
log, () => {
|
||||||
objectPut(authInfo, testPutObjectRequest, undefined, log,
|
objectPut(authInfo, testPutObjectRequest, undefined, log,
|
||||||
(err, result) => {
|
(err, result) => {
|
||||||
assert.strictEqual(result, correctMD5);
|
assert.strictEqual(result.contentMD5, correctMD5);
|
||||||
metadata.getObjectMD(bucketName, objectName, {}, log,
|
metadata.getObjectMD(bucketName, objectName, {}, log,
|
||||||
(err, md) => {
|
(err, md) => {
|
||||||
assert(md);
|
assert(md);
|
||||||
|
@ -185,7 +185,7 @@ describe('objectPut API', () => {
|
||||||
log, () => {
|
log, () => {
|
||||||
objectPut(authInfo, testPutObjectRequest, undefined, log,
|
objectPut(authInfo, testPutObjectRequest, undefined, log,
|
||||||
(err, result) => {
|
(err, result) => {
|
||||||
assert.strictEqual(result, correctMD5);
|
assert.strictEqual(result.contentMD5, correctMD5);
|
||||||
assert.deepStrictEqual(ds, []);
|
assert.deepStrictEqual(ds, []);
|
||||||
metadata.getObjectMD(bucketName, objectName, {}, log,
|
metadata.getObjectMD(bucketName, objectName, {}, log,
|
||||||
(err, md) => {
|
(err, md) => {
|
||||||
|
|
|
@ -9,7 +9,7 @@ import { cleanup,
|
||||||
AccessControlPolicy,
|
AccessControlPolicy,
|
||||||
} from '../helpers';
|
} from '../helpers';
|
||||||
import metadata from '../metadataswitch';
|
import metadata from '../metadataswitch';
|
||||||
import objectPut from '../../../lib/api/objectPut';
|
import { objectPut } from '../../../lib/api/objectPut';
|
||||||
import objectPutACL from '../../../lib/api/objectPutACL';
|
import objectPutACL from '../../../lib/api/objectPutACL';
|
||||||
import DummyRequest from '../DummyRequest';
|
import DummyRequest from '../DummyRequest';
|
||||||
|
|
||||||
|
@ -63,7 +63,7 @@ describe('putObjectACL API', () => {
|
||||||
log, () => {
|
log, () => {
|
||||||
objectPut(authInfo, testPutObjectRequest, undefined, log,
|
objectPut(authInfo, testPutObjectRequest, undefined, log,
|
||||||
(err, result) => {
|
(err, result) => {
|
||||||
assert.strictEqual(result, correctMD5);
|
assert.strictEqual(result.contentMD5, correctMD5);
|
||||||
objectPutACL(authInfo, testObjACLRequest, log, err => {
|
objectPutACL(authInfo, testObjACLRequest, log, err => {
|
||||||
assert
|
assert
|
||||||
.deepStrictEqual(err, errors.InvalidArgument);
|
.deepStrictEqual(err, errors.InvalidArgument);
|
||||||
|
@ -87,7 +87,7 @@ describe('putObjectACL API', () => {
|
||||||
log, () => {
|
log, () => {
|
||||||
objectPut(authInfo, testPutObjectRequest, undefined, log,
|
objectPut(authInfo, testPutObjectRequest, undefined, log,
|
||||||
(err, result) => {
|
(err, result) => {
|
||||||
assert.strictEqual(result, correctMD5);
|
assert.strictEqual(result.contentMD5, correctMD5);
|
||||||
objectPutACL(authInfo, testObjACLRequest, log, err => {
|
objectPutACL(authInfo, testObjACLRequest, log, err => {
|
||||||
assert.strictEqual(err, null);
|
assert.strictEqual(err, null);
|
||||||
metadata.getObjectMD(bucketName, objectName, {},
|
metadata.getObjectMD(bucketName, objectName, {},
|
||||||
|
@ -125,7 +125,7 @@ describe('putObjectACL API', () => {
|
||||||
log, () => {
|
log, () => {
|
||||||
objectPut(authInfo, testPutObjectRequest, undefined, log,
|
objectPut(authInfo, testPutObjectRequest, undefined, log,
|
||||||
(err, result) => {
|
(err, result) => {
|
||||||
assert.strictEqual(result, correctMD5);
|
assert.strictEqual(result.contentMD5, correctMD5);
|
||||||
objectPutACL(authInfo, testObjACLRequest1, log, err => {
|
objectPutACL(authInfo, testObjACLRequest1, log, err => {
|
||||||
assert.strictEqual(err, null);
|
assert.strictEqual(err, null);
|
||||||
metadata.getObjectMD(bucketName, objectName, {},
|
metadata.getObjectMD(bucketName, objectName, {},
|
||||||
|
@ -169,7 +169,7 @@ describe('putObjectACL API', () => {
|
||||||
log, () => {
|
log, () => {
|
||||||
objectPut(authInfo, testPutObjectRequest, undefined, log,
|
objectPut(authInfo, testPutObjectRequest, undefined, log,
|
||||||
(err, result) => {
|
(err, result) => {
|
||||||
assert.strictEqual(result, correctMD5);
|
assert.strictEqual(result.contentMD5, correctMD5);
|
||||||
objectPutACL(authInfo, testObjACLRequest, log, err => {
|
objectPutACL(authInfo, testObjACLRequest, log, err => {
|
||||||
assert.strictEqual(err, null);
|
assert.strictEqual(err, null);
|
||||||
metadata.getObjectMD(bucketName, objectName, {},
|
metadata.getObjectMD(bucketName, objectName, {},
|
||||||
|
@ -212,7 +212,7 @@ describe('putObjectACL API', () => {
|
||||||
log, () => {
|
log, () => {
|
||||||
objectPut(authInfo, testPutObjectRequest, undefined, log,
|
objectPut(authInfo, testPutObjectRequest, undefined, log,
|
||||||
(err, result) => {
|
(err, result) => {
|
||||||
assert.strictEqual(result, correctMD5);
|
assert.strictEqual(result.contentMD5, correctMD5);
|
||||||
objectPutACL(authInfo, testObjACLRequest, log, err => {
|
objectPutACL(authInfo, testObjACLRequest, log, err => {
|
||||||
assert.strictEqual(err,
|
assert.strictEqual(err,
|
||||||
errors.UnresolvableGrantByEmailAddress);
|
errors.UnresolvableGrantByEmailAddress);
|
||||||
|
@ -244,7 +244,7 @@ describe('putObjectACL API', () => {
|
||||||
log, () => {
|
log, () => {
|
||||||
objectPut(authInfo, testPutObjectRequest, undefined,
|
objectPut(authInfo, testPutObjectRequest, undefined,
|
||||||
log, (err, result) => {
|
log, (err, result) => {
|
||||||
assert.strictEqual(result, correctMD5);
|
assert.strictEqual(result.contentMD5, correctMD5);
|
||||||
objectPutACL(authInfo, testObjACLRequest, log, err => {
|
objectPutACL(authInfo, testObjACLRequest, log, err => {
|
||||||
assert.strictEqual(err, null);
|
assert.strictEqual(err, null);
|
||||||
metadata.getObjectMD(bucketName, objectName, {},
|
metadata.getObjectMD(bucketName, objectName, {},
|
||||||
|
@ -310,7 +310,7 @@ describe('putObjectACL API', () => {
|
||||||
log, () => {
|
log, () => {
|
||||||
objectPut(authInfo, testPutObjectRequest, undefined, log,
|
objectPut(authInfo, testPutObjectRequest, undefined, log,
|
||||||
(err, result) => {
|
(err, result) => {
|
||||||
assert.strictEqual(result, correctMD5);
|
assert.strictEqual(result.contentMD5, correctMD5);
|
||||||
objectPutACL(authInfo, testObjACLRequest, log, err => {
|
objectPutACL(authInfo, testObjACLRequest, log, err => {
|
||||||
assert.strictEqual(err, null);
|
assert.strictEqual(err, null);
|
||||||
metadata.getObjectMD(bucketName, objectName, {},
|
metadata.getObjectMD(bucketName, objectName, {},
|
||||||
|
@ -350,7 +350,7 @@ describe('putObjectACL API', () => {
|
||||||
log, () => {
|
log, () => {
|
||||||
objectPut(authInfo, testPutObjectRequest, undefined, log,
|
objectPut(authInfo, testPutObjectRequest, undefined, log,
|
||||||
(err, result) => {
|
(err, result) => {
|
||||||
assert.strictEqual(result, correctMD5);
|
assert.strictEqual(result.contentMD5, correctMD5);
|
||||||
objectPutACL(authInfo, testObjACLRequest, log, err => {
|
objectPutACL(authInfo, testObjACLRequest, log, err => {
|
||||||
assert.strictEqual(err,
|
assert.strictEqual(err,
|
||||||
errors.UnresolvableGrantByEmailAddress);
|
errors.UnresolvableGrantByEmailAddress);
|
||||||
|
@ -381,7 +381,7 @@ describe('putObjectACL API', () => {
|
||||||
log, () => {
|
log, () => {
|
||||||
objectPut(authInfo, testPutObjectRequest, undefined, log,
|
objectPut(authInfo, testPutObjectRequest, undefined, log,
|
||||||
(err, result) => {
|
(err, result) => {
|
||||||
assert.strictEqual(result, correctMD5);
|
assert.strictEqual(result.contentMD5, correctMD5);
|
||||||
objectPutACL(authInfo, testObjACLRequest, log, err => {
|
objectPutACL(authInfo, testObjACLRequest, log, err => {
|
||||||
assert.deepStrictEqual(err,
|
assert.deepStrictEqual(err,
|
||||||
errors.MalformedACLError);
|
errors.MalformedACLError);
|
||||||
|
@ -411,7 +411,7 @@ describe('putObjectACL API', () => {
|
||||||
log, () => {
|
log, () => {
|
||||||
objectPut(authInfo, testPutObjectRequest, undefined, log,
|
objectPut(authInfo, testPutObjectRequest, undefined, log,
|
||||||
(err, result) => {
|
(err, result) => {
|
||||||
assert.strictEqual(result, correctMD5);
|
assert.strictEqual(result.contentMD5, correctMD5);
|
||||||
objectPutACL(authInfo, testObjACLRequest, log, err => {
|
objectPutACL(authInfo, testObjACLRequest, log, err => {
|
||||||
assert.deepStrictEqual(err, errors.MalformedXML);
|
assert.deepStrictEqual(err, errors.MalformedXML);
|
||||||
done();
|
done();
|
||||||
|
@ -440,7 +440,7 @@ describe('putObjectACL API', () => {
|
||||||
log, () => {
|
log, () => {
|
||||||
objectPut(authInfo, testPutObjectRequest, undefined, log,
|
objectPut(authInfo, testPutObjectRequest, undefined, log,
|
||||||
(err, result) => {
|
(err, result) => {
|
||||||
assert.strictEqual(result, correctMD5);
|
assert.strictEqual(result.contentMD5, correctMD5);
|
||||||
objectPutACL(authInfo, testObjACLRequest, log, err => {
|
objectPutACL(authInfo, testObjACLRequest, log, err => {
|
||||||
assert.deepStrictEqual(err, errors.InvalidArgument);
|
assert.deepStrictEqual(err, errors.InvalidArgument);
|
||||||
done();
|
done();
|
||||||
|
@ -469,7 +469,7 @@ describe('putObjectACL API', () => {
|
||||||
log, () => {
|
log, () => {
|
||||||
objectPut(authInfo, testPutObjectRequest, undefined, log,
|
objectPut(authInfo, testPutObjectRequest, undefined, log,
|
||||||
(err, result) => {
|
(err, result) => {
|
||||||
assert.strictEqual(result, correctMD5);
|
assert.strictEqual(result.contentMD5, correctMD5);
|
||||||
objectPutACL(authInfo, testObjACLRequest, log, err => {
|
objectPutACL(authInfo, testObjACLRequest, log, err => {
|
||||||
assert.deepStrictEqual(err, errors.InvalidArgument);
|
assert.deepStrictEqual(err, errors.InvalidArgument);
|
||||||
done();
|
done();
|
||||||
|
|
|
@ -31,7 +31,7 @@ import objectDelete from '../../../lib/api/objectDelete';
|
||||||
import objectGet from '../../../lib/api/objectGet';
|
import objectGet from '../../../lib/api/objectGet';
|
||||||
import objectGetACL from '../../../lib/api/objectGetACL';
|
import objectGetACL from '../../../lib/api/objectGetACL';
|
||||||
import objectHead from '../../../lib/api/objectHead';
|
import objectHead from '../../../lib/api/objectHead';
|
||||||
import objectPut from '../../../lib/api/objectPut';
|
import { objectPut } from '../../../lib/api/objectPut';
|
||||||
import objectPutACL from '../../../lib/api/objectPutACL';
|
import objectPutACL from '../../../lib/api/objectPutACL';
|
||||||
import objectPutPart from '../../../lib/api/objectPutPart';
|
import objectPutPart from '../../../lib/api/objectPutPart';
|
||||||
import { DummyRequestLogger, makeAuthInfo } from '../helpers';
|
import { DummyRequestLogger, makeAuthInfo } from '../helpers';
|
||||||
|
|
Loading…
Reference in New Issue