Compare commits

6 Commits: `development` ... `feature/CL`

| Author | SHA1 | Date |
| --- | --- | --- |
| Will Toozs | 6ff75dbe88 | |
| Will Toozs | b740e13d15 | |
| Will Toozs | 9d9c4ae66d | |
| Will Toozs | 41b839c014 | |
| Will Toozs | 6f82826984 | |
| Will Toozs | efc79b293c | |
```diff
@@ -215,6 +215,7 @@ const constants = {
         'initiateMultipartUpload',
         'objectPutPart',
         'completeMultipartUpload',
+        'objectPost',
     ],
 };
```
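The hunk above registers the new `objectPost` action alongside the multipart-upload actions in the constants module. The surrounding context is elided, so the list's actual name is not visible in this diff; the snippet below is a hypothetical illustration of how such an action list is typically consumed, with `trackedWriteActions` standing in for the real constant.

```js
// Hypothetical sketch — `trackedWriteActions` stands in for the real,
// unnamed constant from the hunk above.
const trackedWriteActions = [
    'initiateMultipartUpload',
    'objectPutPart',
    'completeMultipartUpload',
    'objectPost', // newly registered by this PR
];
const isTrackedWriteAction = action => trackedWriteActions.includes(action);
```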
```diff
@@ -52,6 +52,7 @@ const objectGetRetention = require('./objectGetRetention');
 const objectGetTagging = require('./objectGetTagging');
 const objectHead = require('./objectHead');
 const objectPut = require('./objectPut');
+const objectPost = require('./objectPost');
 const objectPutACL = require('./objectPutACL');
 const objectPutLegalHold = require('./objectPutLegalHold');
 const objectPutTagging = require('./objectPutTagging');
@@ -67,7 +68,9 @@ const writeContinue = require('../utilities/writeContinue');
 const validateQueryAndHeaders = require('../utilities/validateQueryAndHeaders');
 const parseCopySource = require('./apiUtils/object/parseCopySource');
 const { tagConditionKeyAuth } = require('./apiUtils/authorization/tagConditionKeys');
+const { checkAuthResults } = require('./apiUtils/authorization/permissionChecks');
 const checkHttpHeadersSize = require('./apiUtils/object/checkHttpHeadersSize');
+const { processPostForm } = require('./apiUtils/apiCallers/callPostObject');

 const monitoringMap = policies.actionMaps.actionMonitoringMapS3;
```
```diff
@@ -142,49 +145,6 @@ const api = {
         // eslint-disable-next-line no-param-reassign
         request.apiMethods = apiMethods;

-        function checkAuthResults(authResults) {
-            let returnTagCount = true;
-            const isImplicitDeny = {};
-            let isOnlyImplicitDeny = true;
-            if (apiMethod === 'objectGet') {
-                // first item checks s3:GetObject(Version) action
-                if (!authResults[0].isAllowed && !authResults[0].isImplicit) {
-                    log.trace('get object authorization denial from Vault');
-                    return errors.AccessDenied;
-                }
-                // TODO add support for returnTagCount in the bucket policy
-                // checks
-                isImplicitDeny[authResults[0].action] = authResults[0].isImplicit;
-                // second item checks s3:GetObject(Version)Tagging action
-                if (!authResults[1].isAllowed) {
-                    log.trace('get tagging authorization denial ' +
-                        'from Vault');
-                    returnTagCount = false;
-                }
-            } else {
-                for (let i = 0; i < authResults.length; i++) {
-                    isImplicitDeny[authResults[i].action] = true;
-                    if (!authResults[i].isAllowed && !authResults[i].isImplicit) {
-                        // Any explicit deny rejects the current API call
-                        log.trace('authorization denial from Vault');
-                        return errors.AccessDenied;
-                    }
-                    if (authResults[i].isAllowed) {
-                        // If the action is allowed, the result is not implicit
-                        // Deny.
-                        isImplicitDeny[authResults[i].action] = false;
-                        isOnlyImplicitDeny = false;
-                    }
-                }
-            }
-            // These two APIs cannot use ACLs or Bucket Policies, hence, any
-            // implicit deny from vault must be treated as an explicit deny.
-            if ((apiMethod === 'bucketPut' || apiMethod === 'serviceGet') && isOnlyImplicitDeny) {
-                return errors.AccessDenied;
-            }
-            return { returnTagCount, isImplicitDeny };
-        }
-
         return async.waterfall([
             next => auth.server.doAuth(
                 request, log, (err, userInfo, authorizationResults, streamingV4Params) => {
```
```diff
@@ -256,7 +216,7 @@ const api = {
                     return callback(err);
                 }
                 if (authorizationResults) {
-                    const checkedResults = checkAuthResults(authorizationResults);
+                    const checkedResults = checkAuthResults(apiMethod, authorizationResults, log);
                     if (checkedResults instanceof Error) {
                         return callback(checkedResults);
                     }
```
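Taken together, the hunks above move `checkAuthResults` out of `api.js`: the inline closure silently captured `apiMethod` and `log` from the enclosing scope, so the extracted helper (added to `permissionChecks.js` further down) receives them as explicit parameters, and the call site changes to match:

```js
// Before: closure defined inside api.js, capturing apiMethod and log.
const checkedResults = checkAuthResults(authorizationResults);
// After: shared helper from apiUtils/authorization/permissionChecks.
const checkedResults = checkAuthResults(apiMethod, authorizationResults, log);
```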
```diff
@@ -286,6 +246,42 @@ const api = {
             return this[apiMethod](userInfo, request, log, callback);
         });
     },
+    callPostObject(request, response, log, callback) {
+        request.apiMethod = 'objectPost';
+
+        const requestContexts = prepareRequestContexts('objectPost', request,
+            undefined, undefined, undefined);
+        // Extract all the _apiMethods and store them in an array
+        const apiMethods = requestContexts ? requestContexts.map(context => context._apiMethod) : [];
+        // Attach the names to the current request
+        // eslint-disable-next-line no-param-reassign
+        request.apiMethods = apiMethods;
+
+        return processPostForm(request, response, requestContexts, log,
+            (err, userInfo, authorizationResults, streamingV4Params) => {
+                if (err) {
+                    return callback(err);
+                }
+                if (authorizationResults) {
+                    const checkedResults = checkAuthResults(authorizationResults);
+                    if (checkedResults instanceof Error) {
+                        return callback(checkedResults);
+                    }
+                    request.actionImplicitDenies = checkedResults.isImplicitDeny;
+                } else {
+                    // create an object of keys apiMethods with all values to false:
+                    // for backward compatibility, all apiMethods are allowed by default
+                    // thus it is explicitly allowed, so implicit deny is false
+                    request.actionImplicitDenies = apiMethods.reduce((acc, curr) => {
+                        acc[curr] = false;
+                        return acc;
+                    }, {});
+                }
+                request._response = response;
+                return objectPost(userInfo, request, streamingV4Params,
+                    log, callback, authorizationResults);
+            });
+    },
     bucketDelete,
     bucketDeleteCors,
     bucketDeleteEncryption,
@@ -337,6 +333,7 @@ const api = {
     objectCopy,
     objectHead,
     objectPut,
+    objectPost,
     objectPutACL,
     objectPutLegalHold,
     objectPutTagging,
```
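`callPostObject` mirrors the generic `callApiMethod` flow: it tags the request as `objectPost`, builds the request contexts, delegates form parsing plus authentication to `processPostForm`, translates the authorization results into `request.actionImplicitDenies`, and stashes the HTTP response on `request._response` (POST Object must write a `100 Continue` while the form is still streaming in). Below is a minimal sketch of how a server might reach this entry point; the actual routing lives outside this diff, so the wiring is an assumption:

```js
// Hedged sketch only — real routing is not part of this diff.
const http = require('http');
const api = require('./lib/api/api'); // path is an assumption
const log = { trace() {}, error() {} }; // stub logger for the sketch

http.createServer((request, response) => {
    if (request.method === 'POST') {
        return api.callPostObject(request, response, log, (err, headers) => {
            response.writeHead(err ? 400 : 204, headers || {});
            response.end();
        });
    }
    return response.end();
}).listen(8000);
```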
@@ -0,0 +1,180 @@ (new file)

```js
const { auth, errors } = require('arsenal');
const busboy = require('@fastify/busboy');
const writeContinue = require('../../../utilities/writeContinue');
const fs = require('fs');
const path = require('path');
const os = require('os');

// per doc: https://docs.aws.amazon.com/AmazonS3/latest/API/sigv4-HTTPPOSTForms.html#HTTPPOSTFormDeclaration
const MAX_FIELD_SIZE = 20 * 1024; // 20KB
// per doc: https://docs.aws.amazon.com/AmazonS3/latest/userguide/object-keys.html
const MAX_KEY_SIZE = 1024;

async function authenticateRequest(request, requestContexts, log) {
    return new Promise(resolve => {
        // TODO RING-45960 remove ignore for POST object here
        auth.server.doAuth(request, log, (err, userInfo, authorizationResults, streamingV4Params) =>
            resolve({ userInfo, authorizationResults, streamingV4Params }), 's3', requestContexts);
    });
}

async function parseFormData(request, response, requestContexts, log) {
    /* eslint-disable no-param-reassign */
    const formDataParser = busboy({ headers: request.headers });
    writeContinue(request, response);

    return new Promise((resolve, reject) => {
        request.formData = {};
        let totalFieldSize = 0;
        let fileEventData = null;
        let tempFileStream;
        let tempFilePath;
        let authResponse;
        let fileWrittenPromiseResolve;
        let formParserFinishedPromiseResolve;

        const fileWrittenPromise = new Promise((res) => { fileWrittenPromiseResolve = res; });
        const formParserFinishedPromise = new Promise((res) => { formParserFinishedPromiseResolve = res; });

        formDataParser.on('field', (fieldname, val) => {
            totalFieldSize += Buffer.byteLength(val, 'utf8');
            if (totalFieldSize > MAX_FIELD_SIZE) {
                return reject(errors.MaxPostPreDataLengthExceeded);
            }
            const lowerFieldname = fieldname.toLowerCase();
            if (lowerFieldname === 'key') {
                if (val.length > MAX_KEY_SIZE) {
                    return reject(errors.KeyTooLongError);
                } else if (val.length === 0) {
                    return reject(errors.InvalidArgument
                        .customizeDescription('User key must have a length greater than 0.'));
                }
            }
            request.formData[lowerFieldname] = val;
            return undefined;
        });

        formDataParser.on('file', async (fieldname, file, filename, encoding, mimetype) => {
            if (fileEventData) {
                const err = errors.InvalidArgument
                    .customizeDescription('POST requires exactly one file upload per request.');
                file.resume(); // Resume the stream to drain and discard the file
                if (tempFilePath) {
                    fs.unlink(tempFilePath, unlinkErr => {
                        if (unlinkErr) {
                            log.error('Failed to delete temp file', { error: unlinkErr });
                        }
                    });
                }
                return reject(err);
            }

            fileEventData = { fieldname, file, filename, encoding, mimetype };
            if (!('key' in request.formData)) {
                const err = errors.InvalidArgument
                    .customizeDescription('Bucket POST must contain a field named '
                        + "'key'. If it is specified, please check the order of the fields.");
                return reject(err);
            }
            // Replace `${filename}` with the actual filename
            request.formData.key = request.formData.key.replace('${filename}', filename);
            try {
                // Authenticate request before streaming file
                // TODO RING-45960 auth to be properly implemented
                authResponse = await authenticateRequest(request, requestContexts, log);

                // Create a temporary file to stream the file data
                // This is to finalize validation on form data before storing the file
                tempFilePath = path.join(os.tmpdir(), filename);
                tempFileStream = fs.createWriteStream(tempFilePath);

                file.pipe(tempFileStream);

                tempFileStream.on('finish', () => {
                    request.fileEventData = { ...fileEventData, file: tempFilePath };
                    fileWrittenPromiseResolve();
                });

                tempFileStream.on('error', (err) => {
                    log.trace('Error streaming file to temporary location', { error: err });
                    reject(errors.InternalError);
                });

                // Wait for both file writing and form parsing to finish
                return Promise.all([fileWrittenPromise, formParserFinishedPromise])
                    .then(() => resolve(authResponse))
                    .catch(reject);
            } catch (err) {
                return reject(err);
            }
        });

        formDataParser.on('finish', () => {
            if (!fileEventData) {
                const err = errors.InvalidArgument
                    .customizeDescription('POST requires exactly one file upload per request.');
                return reject(err);
            }
            return formParserFinishedPromiseResolve();
        });

        formDataParser.on('error', (err) => {
            log.trace('Error processing form data:', { error: err.description });
            request.unpipe(formDataParser);
            // Following observed AWS behaviour
            reject(errors.MalformedPOSTRequest);
        });

        request.pipe(formDataParser);
        return undefined;
    });
}

function getFileStat(filePath) {
    return new Promise((resolve, reject) => {
        fs.stat(filePath, (err, stats) => {
            if (err) {
                return reject(err);
            }
            return resolve(stats);
        });
    });
}

async function processPostForm(request, response, requestContexts, log, callback) {
    if (!request.headers || !request.headers['content-type'].includes('multipart/form-data')) {
        const contentTypeError = errors.PreconditionFailed
            .customizeDescription('Bucket POST must be of the enclosure-type multipart/form-data');
        return process.nextTick(callback, contentTypeError);
    }
    try {
        const { userInfo, authorizationResults, streamingV4Params } =
            await parseFormData(request, response, requestContexts, log);

        const fileStat = await getFileStat(request.fileEventData.file);
        request.parsedContentLength = fileStat.size;
        request.fileEventData.file = fs.createReadStream(request.fileEventData.file);
        if (request.formData['content-type']) {
            request.headers['content-type'] = request.formData['content-type'];
        } else {
            request.headers['content-type'] = 'binary/octet-stream';
        }

        const authNames = { accountName: userInfo.getAccountDisplayName() };
        if (userInfo.isRequesterAnIAMUser()) {
            authNames.userName = userInfo.getIAMdisplayName();
        }
        log.addDefaultFields(authNames);

        return callback(null, userInfo, authorizationResults, streamingV4Params);
    } catch (err) {
        return callback(err);
    }
}

module.exports = {
    authenticateRequest,
    parseFormData,
    processPostForm,
    getFileStat,
};
```
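A detail worth noting in `parseFormData`: it resolves only after two independent events have both fired — the temp-file stream's `finish` (the uploaded bytes are safely on disk) and busboy's `finish` (every form field has been seen, so the one-file rule can be enforced). The join is isolated below as a runnable sketch:

```js
// Runnable sketch of the two-promise join used by parseFormData.
let fileWritten;
let parserFinished;
const fileWrittenPromise = new Promise(res => { fileWritten = res; });
const parserFinishedPromise = new Promise(res => { parserFinished = res; });

setTimeout(fileWritten, 50);    // stands in for tempFileStream 'finish'
setTimeout(parserFinished, 10); // stands in for busboy 'finish'

// Resolves once both have fired, in whichever order they complete.
Promise.all([fileWrittenPromise, parserFinishedPromise])
    .then(() => console.log('both done: safe to hand the temp file onward'));
```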
```diff
@@ -576,6 +576,48 @@ function validatePolicyConditions(policy) {
     return null;
 }

+function checkAuthResults(apiMethod, authResults, log) {
+    let returnTagCount = true;
+    const isImplicitDeny = {};
+    let isOnlyImplicitDeny = true;
+    if (apiMethod === 'objectGet') {
+        // first item checks s3:GetObject(Version) action
+        if (!authResults[0].isAllowed && !authResults[0].isImplicit) {
+            log.trace('get object authorization denial from Vault');
+            return errors.AccessDenied;
+        }
+        // TODO add support for returnTagCount in the bucket policy
+        // checks
+        isImplicitDeny[authResults[0].action] = authResults[0].isImplicit;
+        // second item checks s3:GetObject(Version)Tagging action
+        if (!authResults[1].isAllowed) {
+            log.trace('get tagging authorization denial ' +
+                'from Vault');
+            returnTagCount = false;
+        }
+    } else {
+        for (let i = 0; i < authResults.length; i++) {
+            isImplicitDeny[authResults[i].action] = true;
+            if (!authResults[i].isAllowed && !authResults[i].isImplicit) {
+                // Any explicit deny rejects the current API call
+                log.trace('authorization denial from Vault');
+                return errors.AccessDenied;
+            }
+            if (authResults[i].isAllowed) {
+                // If the action is allowed, the result is not implicit
+                // Deny.
+                isImplicitDeny[authResults[i].action] = false;
+                isOnlyImplicitDeny = false;
+            }
+        }
+    }
+    // These two APIs cannot use ACLs or Bucket Policies, hence, any
+    // implicit deny from vault must be treated as an explicit deny.
+    if ((apiMethod === 'bucketPut' || apiMethod === 'serviceGet') && isOnlyImplicitDeny) {
+        return errors.AccessDenied;
+    }
+    return { returnTagCount, isImplicitDeny };
+}
+
 /** isLifecycleSession - check if it is the Lifecycle assumed role session arn.
  * @param {string} arn - Amazon resource name - example:
@@ -607,6 +649,7 @@ module.exports = {
     checkObjectAcls,
     validatePolicyResource,
     validatePolicyConditions,
+    checkAuthResults,
     isLifecycleSession,
     evaluateBucketPolicyWithIAM,
 };
```
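The helper's contract, shown as a hedged worked example (stub logger, sample action names — not taken from the diff): an explicit deny short-circuits to `AccessDenied`, while implicit denies are merely recorded per action so bucket policies can still be evaluated later.

```js
// Worked example with stub inputs.
const log = { trace: () => {} };
const authResults = [
    { action: 'objectPost', isAllowed: true, isImplicit: false },
    { action: 'objectPutTagging', isAllowed: false, isImplicit: true },
];
checkAuthResults('objectPost', authResults, log);
// => { returnTagCount: true,
//      isImplicitDeny: { objectPost: false, objectPutTagging: true } }
```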
```diff
@@ -210,8 +210,18 @@ function createAndStoreObject(bucketName, bucketMD, objectKey, objMD, authInfo,
             metadataStoreParams.contentMD5 = constants.emptyFileMd5;
             return next(null, null, null);
         }
-        return dataStore(objectKeyContext, cipherBundle, request, size,
-            streamingV4Params, backendInfo, log, next);
+        // Object Post receives a file stream.
+        // This is to be used to store data instead of the request stream itself.
+
+        let stream;
+
+        if (request.apiMethod === 'objectPost') {
+            stream = request.fileEventData ? request.fileEventData.file : undefined;
+        } else {
+            stream = request;
+        }
+
+        return dataStore(objectKeyContext, cipherBundle, stream, size, streamingV4Params, backendInfo, log, next);
     },
     function processDataResult(dataGetInfo, calculatedHash, next) {
         if (dataGetInfo === null || dataGetInfo === undefined) {
```
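For POST Object the payload never arrives on the request stream itself: `callPostObject.js` buffers it to a temp file and stores a read stream at `request.fileEventData.file`, which is what `dataStore` now consumes. The selection above is equivalent to:

```js
// Equivalent one-expression form of the stream selection above.
const stream = request.apiMethod === 'objectPost'
    ? (request.fileEventData && request.fileEventData.file)
    : request;
```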
```diff
@@ -13,7 +13,7 @@ const V4Transform = require('../../../auth/streamingV4/V4Transform');
  * the type of request requires them
  */
 function prepareStream(stream, streamingV4Params, log, errCb) {
-    if (stream.headers['x-amz-content-sha256'] ===
+    if (stream && stream.headers && stream.headers['x-amz-content-sha256'] ===
         'STREAMING-AWS4-HMAC-SHA256-PAYLOAD') {
         if (typeof streamingV4Params !== 'object') {
             // this might happen if the user provided a valid V2
```
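The added null-guard exists precisely because of that stream substitution: for POST Object, `prepareStream` receives an `fs.ReadStream` over the temp file, which has no `headers` property, so the old unguarded lookup would throw.

```js
const fs = require('fs');

const stream = fs.createReadStream('/tmp/example'); // what objectPost passes in
// Old check: stream.headers['x-amz-content-sha256'] throws a TypeError,
// because fs.ReadStream has no `headers`. The guarded form short-circuits:
const isStreamingV4 = Boolean(stream && stream.headers
    && stream.headers['x-amz-content-sha256'] === 'STREAMING-AWS4-HMAC-SHA256-PAYLOAD');
```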
@@ -0,0 +1,118 @@ (new file)

```js
const async = require('async');
const { errors, versioning } = require('arsenal');

const collectCorsHeaders = require('../utilities/collectCorsHeaders');
const createAndStoreObject = require('./apiUtils/object/createAndStoreObject');
const { standardMetadataValidateBucketAndObj } = require('../metadata/metadataUtils');
const { config } = require('../Config');
const { setExpirationHeaders } = require('./apiUtils/object/expirationHeaders');
const monitoring = require('../utilities/metrics');
const writeContinue = require('../utilities/writeContinue');
const { overheadField } = require('../../constants');

const versionIdUtils = versioning.VersionID;

/**
 * POST Object in the requested bucket. Steps include:
 * validating metadata for authorization, bucket and object existence etc.
 * store object data in datastore upon successful authorization
 * store object location returned by datastore and
 * object's (custom) headers in metadata
 * return the result in final callback
 *
 * @param {AuthInfo} authInfo - Instance of AuthInfo class with requester's info
 * @param {request} request - request object given by router,
 *                            includes normalized headers
 * @param {object | undefined} streamingV4Params - if v4 auth,
 * object containing accessKey, signatureFromRequest, region, scopeDate,
 * timestamp, and credentialScope
 * (to be used for streaming v4 auth if applicable)
 * @param {object} log - the log request
 * @param {Function} callback - final callback to call with the result
 * @return {undefined}
 */
function objectPost(authInfo, request, streamingV4Params, log, callback) {
    const {
        headers,
        method,
        formData,
        bucketName,
    } = request;
    const requestType = request.apiMethods || 'objectPost';
    const valParams = {
        authInfo,
        bucketName,
        objectKey: formData.key,
        requestType,
        request,
    };
    const canonicalID = authInfo.getCanonicalID();

    log.trace('owner canonicalID to send to data', { canonicalID });
    return standardMetadataValidateBucketAndObj(valParams, request.actionImplicitDenies, log,
        (err, bucket, objMD) => {
            const responseHeaders = collectCorsHeaders(headers.origin,
                method, bucket);

            // TODO RING-45960 remove accessdenied skip
            if (err && !err.AccessDenied) {
                log.trace('error processing request', {
                    error: err,
                    method: 'metadataValidateBucketAndObj',
                });
                monitoring.promMetrics('POST', request.bucketName, err.code, 'postObject');
                return callback(err, responseHeaders);
            }
            if (bucket.hasDeletedFlag() && canonicalID !== bucket.getOwner()) {
                log.trace('deleted flag on bucket and request ' +
                    'from non-owner account');
                monitoring.promMetrics('POST', request.bucketName, 404, 'postObject');
                return callback(errors.NoSuchBucket);
            }

            return async.waterfall([
                function objectCreateAndStore(next) {
                    writeContinue(request, request._response);
                    return createAndStoreObject(request.bucketName,
                        bucket, request.formData.key, objMD, authInfo, canonicalID, null,
                        request, false, streamingV4Params, overheadField, log, next);
                },
            ], (err, storingResult) => {
                if (err) {
                    monitoring.promMetrics('POST', request.bucketName, err.code,
                        'postObject');
                    return callback(err, responseHeaders);
                }
                setExpirationHeaders(responseHeaders, {
                    lifecycleConfig: bucket.getLifecycleConfiguration(),
                    objectParams: {
                        key: request.key,
                        date: storingResult.lastModified,
                        tags: storingResult.tags,
                    },
                });
                if (storingResult) {
                    // ETag's hex should always be enclosed in quotes
                    responseHeaders.Key = request.formData.key;
                    responseHeaders.location = `/${bucketName}/${request.formData.key}`;
                    responseHeaders.Bucket = bucketName;
                    responseHeaders.ETag = `"${storingResult.contentMD5}"`;
                }
                const vcfg = bucket.getVersioningConfiguration();
                const isVersionedObj = vcfg && vcfg.Status === 'Enabled';
                if (isVersionedObj) {
                    if (storingResult && storingResult.versionId) {
                        responseHeaders['x-amz-version-id'] =
                            versionIdUtils.encode(storingResult.versionId,
                                config.versionIdEncodingType);
                    }
                }

                return callback(null, responseHeaders);
            });
        });
}

module.exports = objectPost;
```
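On success the handler hands the router a header bag shaped like the AWS POST Object response. Illustrative values only:

```js
// Illustrative shape of the success headers built above (dummy values).
const exampleResponseHeaders = {
    Key: 'test-file.txt',
    Bucket: 'my-bucket',
    location: '/my-bucket/test-file.txt',
    ETag: '"9a0364b9e99bb480dd25e1f0284c8555"',
    'x-amz-version-id': '<encoded version id>', // only when versioning is Enabled
};
```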
```diff
@@ -19,8 +19,9 @@
     },
     "homepage": "https://github.com/scality/S3#readme",
     "dependencies": {
+        "@fastify/busboy": "^2.1.1",
         "@hapi/joi": "^17.1.0",
-        "arsenal": "git+https://github.com/scality/arsenal#7.70.29",
+        "arsenal": "git+https://github.com/scality/arsenal#61984fbac3721d72cfc05a0cba0a0965c158008d",
         "async": "~2.5.0",
         "aws-sdk": "2.905.0",
         "azure-storage": "^2.1.0",
@@ -60,6 +61,9 @@
     },
     "scripts": {
         "ft_awssdk": "cd tests/functional/aws-node-sdk && mocha --reporter mocha-multi-reporters --reporter-options configFile=$INIT_CWD/tests/reporter-config.json test/",
+        "ft_post": "cd tests/functional/aws-node-sdk && mocha --reporter mocha-multi-reporters --reporter-options configFile=$INIT_CWD/tests/reporter-config.json test/object/post.js",
+        "ft_post_aws": "cd tests/functional/aws-node-sdk && mocha --reporter mocha-multi-reporters --reporter-options configFile=$INIT_CWD/tests/reporter-config.json test/object/post-copy.js",
+        "ft_post_unit": "CI=true S3BACKEND=mem mocha --reporter mocha-multi-reporters --reporter-options configFile=$INIT_CWD/tests/reporter-config.json --recursive tests/unit/api/callPostMethod.js",
         "ft_awssdk_aws": "cd tests/functional/aws-node-sdk && AWS_ON_AIR=true mocha --reporter mocha-multi-reporters --reporter-options configFile=$INIT_CWD/tests/reporter-config.json test/",
         "ft_awssdk_buckets": "cd tests/functional/aws-node-sdk && mocha --reporter mocha-multi-reporters --reporter-options configFile=$INIT_CWD/tests/reporter-config.json test/bucket",
         "ft_awssdk_objects_misc": "cd tests/functional/aws-node-sdk && mocha --reporter mocha-multi-reporters --reporter-options configFile=$INIT_CWD/tests/reporter-config.json test/legacy test/object test/service test/support",
```
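`@fastify/busboy` is the new multipart parser used by `callPostObject.js`, and the `arsenal` pin moves from the `7.70.29` tag to a specific commit. The three new scripts run the POST functional and unit suites. A minimal sketch of the busboy surface exactly as the new code uses it (no `new`, headers object passed in):

```js
const busboy = require('@fastify/busboy');

// The content-type header must carry the multipart boundary.
const parser = busboy({ headers: { 'content-type': 'multipart/form-data; boundary=XYZ' } });
parser.on('field', (name, value) => console.log('field:', name, value));
parser.on('file', (name, stream) => stream.resume()); // drain the file stream
parser.on('finish', () => console.log('form fully parsed'));
// incomingRequest.pipe(parser);
```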
@ -0,0 +1,906 @@
|
||||||
|
|
||||||
|
const xml2js = require('xml2js');
|
||||||
|
const axios = require('axios');
|
||||||
|
const crypto = require('crypto');
|
||||||
|
const FormData = require('form-data');
|
||||||
|
const assert = require('assert');
|
||||||
|
|
||||||
|
const BucketUtility = require('../../lib/utility/bucket-util');
|
||||||
|
const getConfig = require('../support/config');
|
||||||
|
|
||||||
|
let bucketName;
|
||||||
|
const filename = 'test-file.txt';
|
||||||
|
let fileBuffer;
|
||||||
|
const region = 'us-east-1';
|
||||||
|
let ak;
|
||||||
|
let sk;
|
||||||
|
let s3;
|
||||||
|
|
||||||
|
const generateBucketName = () => `test-bucket-${crypto.randomBytes(8).toString('hex')}`;
|
||||||
|
|
||||||
|
const formatDate = (date) => {
|
||||||
|
const year = date.getUTCFullYear();
|
||||||
|
const month = (date.getUTCMonth() + 1).toString().padStart(2, '0');
|
||||||
|
const day = date.getUTCDate().toString().padStart(2, '0');
|
||||||
|
return `${year}${month}${day}`;
|
||||||
|
};
|
||||||
|
|
||||||
|
const getSignatureKey = (key, dateStamp, regionName, serviceName) => {
|
||||||
|
const kDate = crypto.createHmac('sha256', `AWS4${key}`).update(dateStamp).digest();
|
||||||
|
const kRegion = crypto.createHmac('sha256', kDate).update(regionName).digest();
|
||||||
|
const kService = crypto.createHmac('sha256', kRegion).update(serviceName).digest();
|
||||||
|
const kSigning = crypto.createHmac('sha256', kService).update('aws4_request').digest();
|
||||||
|
return kSigning;
|
||||||
|
};
|
||||||
|
|
||||||
|
// 'additionalConditions' will also replace existing keys if they are present
|
||||||
|
const calculateFields = (ak, sk, additionalConditions, bucket = bucketName, key = filename) => {
|
||||||
|
const service = 's3';
|
||||||
|
|
||||||
|
const now = new Date();
|
||||||
|
const formattedDate = now.toISOString().replace(/[:-]|\.\d{3}/g, '');
|
||||||
|
let shortFormattedDate = formatDate(now);
|
||||||
|
|
||||||
|
const credential = `${ak}/${shortFormattedDate}/${region}/${service}/aws4_request`;
|
||||||
|
const conditionsFields = [
|
||||||
|
{ bucket },
|
||||||
|
{ key },
|
||||||
|
{ 'x-amz-credential': credential },
|
||||||
|
{ 'x-amz-algorithm': 'AWS4-HMAC-SHA256' },
|
||||||
|
{ 'x-amz-date': formattedDate },
|
||||||
|
];
|
||||||
|
if (additionalConditions) {
|
||||||
|
additionalConditions.forEach(field => {
|
||||||
|
const key = Object.keys(field)[0];
|
||||||
|
const value = field[key];
|
||||||
|
const index = conditionsFields.findIndex(condition => condition.hasOwnProperty(key));
|
||||||
|
if (index !== -1) {
|
||||||
|
conditionsFields[index][key] = value;
|
||||||
|
if (key === 'x-amz-date') {
|
||||||
|
shortFormattedDate = value.split('T')[0];
|
||||||
|
}
|
||||||
|
} else {
|
||||||
|
conditionsFields.push({ [key]: value });
|
||||||
|
}
|
||||||
|
});
|
||||||
|
}
|
||||||
|
const policy = {
|
||||||
|
expiration: new Date(new Date().getTime() + 60000).toISOString(),
|
||||||
|
conditions: conditionsFields,
|
||||||
|
};
|
||||||
|
const policyBase64 = Buffer.from(JSON.stringify(policy)).toString('base64');
|
||||||
|
|
||||||
|
const signingKey = getSignatureKey(sk, shortFormattedDate, region, service);
|
||||||
|
const signature = crypto.createHmac('sha256', signingKey).update(policyBase64).digest('hex');
|
||||||
|
|
||||||
|
const returnFields = [
|
||||||
|
{ name: 'x-amz-credential', value: credential },
|
||||||
|
{ name: 'x-amz-algorithm', value: 'AWS4-HMAC-SHA256' },
|
||||||
|
{ name: 'x-amz-signature', value: signature },
|
||||||
|
{ name: 'x-amz-date', value: formattedDate },
|
||||||
|
{ name: 'policy', value: policyBase64 },
|
||||||
|
{ name: 'bucket', value: bucket },
|
||||||
|
{ name: 'key', value: key },
|
||||||
|
];
|
||||||
|
if (!additionalConditions) {
|
||||||
|
return returnFields;
|
||||||
|
}
|
||||||
|
if (additionalConditions) {
|
||||||
|
additionalConditions.forEach(field => {
|
||||||
|
const key = Object.keys(field)[0];
|
||||||
|
const value = field[key];
|
||||||
|
const index = returnFields.findIndex(f => f.name === key);
|
||||||
|
if (index !== -1) {
|
||||||
|
returnFields[index].value = value;
|
||||||
|
} else {
|
||||||
|
returnFields.push({ name: key, value });
|
||||||
|
}
|
||||||
|
});
|
||||||
|
}
|
||||||
|
return returnFields;
|
||||||
|
};
|
||||||
|
|
||||||
|
describe('POST object', () => {
|
||||||
|
let bucketUtil;
|
||||||
|
let config;
|
||||||
|
const testContext = {};
|
||||||
|
|
||||||
|
before(() => {
|
||||||
|
config = getConfig('default');
|
||||||
|
ak = config.credentials.accessKeyId;
|
||||||
|
sk = config.credentials.secretAccessKey;
|
||||||
|
bucketUtil = new BucketUtility('default');
|
||||||
|
s3 = bucketUtil.s3;
|
||||||
|
});
|
||||||
|
|
||||||
|
beforeEach(done => {
|
||||||
|
bucketName = generateBucketName();
|
||||||
|
const url = `${config.endpoint}/${bucketName}`;
|
||||||
|
testContext.bucketName = bucketName;
|
||||||
|
testContext.url = url;
|
||||||
|
|
||||||
|
const fileContent = 'This is a test file';
|
||||||
|
fileBuffer = Buffer.from(fileContent);
|
||||||
|
|
||||||
|
// Create the bucket
|
||||||
|
s3.createBucket({ Bucket: bucketName }, err => {
|
||||||
|
if (err) {
|
||||||
|
return done(err);
|
||||||
|
}
|
||||||
|
return done();
|
||||||
|
});
|
||||||
|
});
|
||||||
|
|
||||||
|
|
||||||
|
afterEach(done => {
|
||||||
|
const { bucketName } = testContext;
|
||||||
|
|
||||||
|
process.stdout.write('Emptying bucket\n');
|
||||||
|
bucketUtil.empty(bucketName)
|
||||||
|
.then(() => {
|
||||||
|
process.stdout.write('Deleting bucket\n');
|
||||||
|
return bucketUtil.deleteOne(bucketName);
|
||||||
|
})
|
||||||
|
.then(() => done())
|
||||||
|
.catch(err => {
|
||||||
|
if (err.code !== 'NoSuchBucket') {
|
||||||
|
process.stdout.write('Error in afterEach\n');
|
||||||
|
return done(err);
|
||||||
|
}
|
||||||
|
return done();
|
||||||
|
});
|
||||||
|
});
|
||||||
|
|
||||||
|
|
||||||
|
it('should successfully upload an object using a POST form', done => {
|
||||||
|
const { url } = testContext;
|
||||||
|
const fields = calculateFields(ak, sk);
|
||||||
|
const formData = new FormData();
|
||||||
|
|
||||||
|
fields.forEach(field => {
|
||||||
|
formData.append(field.name, field.value);
|
||||||
|
});
|
||||||
|
|
||||||
|
formData.append('file', fileBuffer, { filename });
|
||||||
|
|
||||||
|
formData.getLength((err, length) => {
|
||||||
|
if (err) {
|
||||||
|
return done(err);
|
||||||
|
}
|
||||||
|
|
||||||
|
return axios.post(url, formData, {
|
||||||
|
headers: {
|
||||||
|
...formData.getHeaders(),
|
||||||
|
'Content-Length': length,
|
||||||
|
},
|
||||||
|
})
|
||||||
|
.then(response => {
|
||||||
|
assert.equal(response.status, 204);
|
||||||
|
assert.equal(response.headers.location, `/${bucketName}/${filename}`);
|
||||||
|
assert.equal(response.headers.key, filename);
|
||||||
|
assert.equal(response.headers.bucket, bucketName);
|
||||||
|
done();
|
||||||
|
})
|
||||||
|
.catch(err => {
|
||||||
|
done(err);
|
||||||
|
});
|
||||||
|
});
|
||||||
|
});
|
||||||
|
|
||||||
|
it('should handle error when bucket does not exist', done => {
|
||||||
|
const fakeBucketName = generateBucketName();
|
||||||
|
const tempUrl = `${config.endpoint}/${fakeBucketName}`;
|
||||||
|
const fields = calculateFields(ak, sk, [], fakeBucketName);
|
||||||
|
const formData = new FormData();
|
||||||
|
|
||||||
|
fields.forEach(field => {
|
||||||
|
formData.append(field.name, field.value);
|
||||||
|
});
|
||||||
|
|
||||||
|
const fileContent = 'This is a test file';
|
||||||
|
const fileBuffer = Buffer.from(fileContent);
|
||||||
|
|
||||||
|
formData.append('file', fileBuffer, { filename });
|
||||||
|
|
||||||
|
formData.getLength((err, length) => {
|
||||||
|
if (err) {
|
||||||
|
return done(err);
|
||||||
|
}
|
||||||
|
|
||||||
|
return axios.post(tempUrl, formData, {
|
||||||
|
headers: {
|
||||||
|
...formData.getHeaders(),
|
||||||
|
'Content-Length': length,
|
||||||
|
},
|
||||||
|
})
|
||||||
|
.then(() => {
|
||||||
|
done(new Error('Expected error but got success response'));
|
||||||
|
})
|
||||||
|
.catch(err => {
|
||||||
|
assert.equal(err.response.status, 404);
|
||||||
|
done();
|
||||||
|
});
|
||||||
|
});
|
||||||
|
});
|
||||||
|
|
||||||
|
|
||||||
|
it('should successfully upload a larger file to S3 using a POST form', done => {
|
||||||
|
const { url } = testContext;
|
||||||
|
const largeFileName = 'large-test-file.txt';
|
||||||
|
const largeFileContent = 'This is a larger test file'.repeat(10000); // Simulate a larger file
|
||||||
|
const largeFileBuffer = Buffer.from(largeFileContent);
|
||||||
|
|
||||||
|
const fields = calculateFields(ak, sk, [{ key: largeFileName }]);
|
||||||
|
const formData = new FormData();
|
||||||
|
|
||||||
|
fields.forEach(field => {
|
||||||
|
formData.append(field.name, field.value);
|
||||||
|
});
|
||||||
|
|
||||||
|
formData.append('file', largeFileBuffer, { filename: largeFileName });
|
||||||
|
|
||||||
|
formData.getLength((err, length) => {
|
||||||
|
if (err) {
|
||||||
|
return done(err);
|
||||||
|
}
|
||||||
|
|
||||||
|
return axios.post(url, formData, {
|
||||||
|
headers: {
|
||||||
|
...formData.getHeaders(),
|
||||||
|
'Content-Length': length,
|
||||||
|
},
|
||||||
|
})
|
||||||
|
.then(response => {
|
||||||
|
assert.equal(response.status, 204);
|
||||||
|
s3.listObjectsV2({ Bucket: bucketName }, (err, data) => {
|
||||||
|
if (err) {
|
||||||
|
return done(err);
|
||||||
|
}
|
||||||
|
|
||||||
|
const uploadedFile = data.Contents.find(item => item.Key === largeFileName);
|
||||||
|
assert(uploadedFile, 'Uploaded file should exist in the bucket');
|
||||||
|
assert.equal(uploadedFile.Size, Buffer.byteLength(largeFileContent), 'File size should match');
|
||||||
|
|
||||||
|
return done();
|
||||||
|
});
|
||||||
|
})
|
||||||
|
.catch(err => {
|
||||||
|
done(err);
|
||||||
|
});
|
||||||
|
});
|
||||||
|
});
|
||||||
|
|
||||||
|
it('should be able to post an empty file and verify its existence', done => {
|
||||||
|
const { url } = testContext;
|
||||||
|
const fields = calculateFields(ak, sk);
|
||||||
|
const formData = new FormData();
|
||||||
|
|
||||||
|
fields.forEach(field => {
|
||||||
|
formData.append(field.name, field.value);
|
||||||
|
});
|
||||||
|
|
||||||
|
const emptyFileBuffer = Buffer.from(''); // Create a buffer for an empty file
|
||||||
|
|
||||||
|
formData.append('file', emptyFileBuffer, filename);
|
||||||
|
|
||||||
|
formData.getLength((err, length) => {
|
||||||
|
if (err) {
|
||||||
|
return done(err);
|
||||||
|
}
|
||||||
|
|
||||||
|
return axios.post(url, formData, {
|
||||||
|
headers: {
|
||||||
|
...formData.getHeaders(),
|
||||||
|
'Content-Length': length,
|
||||||
|
},
|
||||||
|
})
|
||||||
|
.then(response => {
|
||||||
|
assert.equal(response.status, 204);
|
||||||
|
|
||||||
|
// Check if the object exists using listObjects
|
||||||
|
s3.listObjectsV2({ Bucket: bucketName, Prefix: filename }, (err, data) => {
|
||||||
|
if (err) {
|
||||||
|
return done(err);
|
||||||
|
}
|
||||||
|
|
||||||
|
const fileExists = data.Contents.some(item => item.Key === filename);
|
||||||
|
const file = data.Contents.find(item => item.Key === filename);
|
||||||
|
|
||||||
|
assert(fileExists, 'File should exist in S3');
|
||||||
|
assert.equal(file.Size, 0, 'File size should be 0');
|
||||||
|
|
||||||
|
// Clean up: delete the empty file from S3
|
||||||
|
return s3.deleteObject({ Bucket: bucketName, Key: filename }, err => {
|
||||||
|
if (err) {
|
||||||
|
return done(err);
|
||||||
|
}
|
||||||
|
|
||||||
|
return done();
|
||||||
|
});
|
||||||
|
});
|
||||||
|
})
|
||||||
|
.catch(err => {
|
||||||
|
done(err);
|
||||||
|
});
|
||||||
|
});
|
||||||
|
});
|
||||||
|
|
||||||
|
it('should handle error when file is missing', done => {
|
||||||
|
const { url } = testContext;
|
||||||
|
const fields = calculateFields(ak, sk);
|
||||||
|
const formData = new FormData();
|
||||||
|
|
||||||
|
fields.forEach(field => {
|
||||||
|
formData.append(field.name, field.value);
|
||||||
|
});
|
||||||
|
|
||||||
|
formData.getLength((err, length) => {
|
||||||
|
if (err) {
|
||||||
|
return done(err);
|
||||||
|
}
|
||||||
|
|
||||||
|
return axios.post(url, formData, {
|
||||||
|
headers: {
|
||||||
|
...formData.getHeaders(),
|
||||||
|
'Content-Length': length,
|
||||||
|
},
|
||||||
|
})
|
||||||
|
.then(() => {
|
||||||
|
done(new Error('Expected error but got success response'));
|
||||||
|
})
|
||||||
|
.catch(err => {
|
||||||
|
assert.equal(err.response.status, 400);
|
||||||
|
xml2js.parseString(err.response.data, (parseErr, result) => {
|
||||||
|
if (parseErr) {
|
||||||
|
return done(parseErr);
|
||||||
|
}
|
||||||
|
|
||||||
|
const error = result.Error;
|
||||||
|
assert.equal(error.Code[0], 'InvalidArgument');
|
||||||
|
assert.equal(error.Message[0], 'POST requires exactly one file upload per request.');
|
||||||
|
return done();
|
||||||
|
});
|
||||||
|
});
|
||||||
|
});
|
||||||
|
});
|
||||||
|
|
||||||
|
it('should handle error when there are multiple files', done => {
|
||||||
|
const { url } = testContext;
|
||||||
|
const fields = calculateFields(ak, sk);
|
||||||
|
const formData = new FormData();
|
||||||
|
|
||||||
|
fields.forEach(field => {
|
||||||
|
formData.append(field.name, field.value);
|
||||||
|
});
|
||||||
|
|
||||||
|
// Append the same buffer twice to simulate multiple files
|
||||||
|
formData.append('file', fileBuffer, { filename });
|
||||||
|
formData.append('file', fileBuffer, { filename });
|
||||||
|
|
||||||
|
formData.getLength((err, length) => {
|
||||||
|
if (err) {
|
||||||
|
return done(err);
|
||||||
|
}
|
||||||
|
|
||||||
|
return axios.post(url, formData, {
|
||||||
|
headers: {
|
||||||
|
...formData.getHeaders(),
|
||||||
|
'Content-Length': length,
|
||||||
|
},
|
||||||
|
})
|
||||||
|
.then(() => {
|
||||||
|
done(new Error('Expected error but got success response'));
|
||||||
|
})
|
||||||
|
.catch(err => {
|
||||||
|
assert.equal(err.response.status, 400);
|
||||||
|
xml2js.parseString(err.response.data, (parseErr, result) => {
|
||||||
|
if (parseErr) {
|
||||||
|
return done(parseErr);
|
||||||
|
}
|
||||||
|
|
||||||
|
const error = result.Error;
|
||||||
|
assert.equal(error.Code[0], 'InvalidArgument');
|
||||||
|
assert.equal(error.Message[0], 'POST requires exactly one file upload per request.');
|
||||||
|
return done();
|
||||||
|
});
|
||||||
|
});
|
||||||
|
});
|
||||||
|
});
|
||||||
|
|
||||||
|
|
||||||
|
it('should handle error when key is missing', done => {
|
||||||
|
const { url } = testContext;
|
||||||
|
// Prep fields then remove the key field
|
||||||
|
let fields = calculateFields(ak, sk);
|
||||||
|
fields = fields.filter(e => e.name !== 'key');
|
||||||
|
|
||||||
|
const formData = new FormData();
|
||||||
|
|
||||||
|
fields.forEach(field => {
|
||||||
|
formData.append(field.name, field.value);
|
||||||
|
});
|
||||||
|
|
||||||
|
const fileContent = 'This is a test file';
|
||||||
|
const fileBuffer = Buffer.from(fileContent);
|
||||||
|
|
||||||
|
formData.append('file', fileBuffer, { filename });
|
||||||
|
|
||||||
|
formData.getLength((err, length) => {
|
||||||
|
if (err) {
|
||||||
|
return done(err);
|
||||||
|
}
|
||||||
|
|
||||||
|
return axios.post(url, formData, {
|
||||||
|
headers: {
|
||||||
|
...formData.getHeaders(),
|
||||||
|
'Content-Length': length,
|
||||||
|
},
|
||||||
|
})
|
||||||
|
.then(() => {
|
||||||
|
done(new Error('Request should not succeed without key field'));
|
||||||
|
})
|
||||||
|
.catch(err => {
|
||||||
|
assert.ok(err.response, 'Error should be returned by axios');
|
||||||
|
|
||||||
|
xml2js.parseString(err.response.data, (parseErr, result) => {
|
||||||
|
if (parseErr) {
|
||||||
|
return done(parseErr);
|
||||||
|
}
|
||||||
|
|
||||||
|
const error = result.Error;
|
||||||
|
assert.equal(error.Code[0], 'InvalidArgument');
|
||||||
|
assert.equal(error.Message[0],
|
||||||
|
"Bucket POST must contain a field named 'key'. "
|
||||||
|
+ 'If it is specified, please check the order of the fields.');
|
||||||
|
return done();
|
||||||
|
});
|
||||||
|
});
|
||||||
|
});
|
||||||
|
});
|
||||||
|
|
||||||
|
it('should handle error when content-type is incorrect', done => {
|
||||||
|
const { url } = testContext;
|
||||||
|
// Prep fields then remove the key field
|
||||||
|
let fields = calculateFields(ak, sk);
|
||||||
|
fields = fields.filter(e => e.name !== 'key');
|
||||||
|
|
||||||
|
const formData = new FormData();
|
||||||
|
|
||||||
|
fields.forEach(field => {
|
||||||
|
formData.append(field.name, field.value);
|
||||||
|
});
|
||||||
|
|
||||||
|
formData.append('file', fileBuffer, filename);
|
||||||
|
|
||||||
|
formData.getLength((err, length) => {
|
||||||
|
if (err) {
|
||||||
|
return done(err);
|
||||||
|
}
|
||||||
|
|
||||||
|
const headers = {
|
||||||
|
...formData.getHeaders(),
|
||||||
|
'Content-Length': length,
|
||||||
|
};
|
||||||
|
headers['content-type'] = 'application/json';
|
||||||
|
return axios.post(url, formData, {
|
||||||
|
headers,
|
||||||
|
})
|
||||||
|
.then(() => {
|
||||||
|
done(new Error('Request should not succeed wrong content-type'));
|
||||||
|
})
|
||||||
|
.catch(err => {
|
||||||
|
assert.ok(err.response, 'Error should be returned by axios');
|
||||||
|
|
||||||
|
xml2js.parseString(err.response.data, (err, result) => {
|
||||||
|
if (err) {
|
||||||
|
return done(err);
|
||||||
|
}
|
||||||
|
|
||||||
|
const error = result.Error;
|
||||||
|
assert.equal(error.Code[0], 'PreconditionFailed');
|
||||||
|
assert.equal(error.Message[0],
|
||||||
|
'Bucket POST must be of the enclosure-type multipart/form-data');
|
||||||
|
return done();
|
||||||
|
});
|
||||||
|
});
|
||||||
|
});
|
||||||
|
});
|
||||||
|
|
||||||
|
it('should handle error when content-type is missing', done => {
|
||||||
|
const { url } = testContext;
|
||||||
|
// Prep fields then remove the key field
|
||||||
|
let fields = calculateFields(ak, sk);
|
||||||
|
fields = fields.filter(e => e.name !== 'key');
|
||||||
|
|
||||||
|
const formData = new FormData();
|
||||||
|
|
||||||
|
fields.forEach(field => {
|
||||||
|
formData.append(field.name, field.value);
|
||||||
|
});
|
||||||
|
|
||||||
|
formData.append('file', fileBuffer, filename);
|
||||||
|
|
||||||
|
formData.getLength((err, length) => {
|
||||||
|
if (err) {
|
||||||
|
return done(err);
|
||||||
|
}
|
||||||
|
|
||||||
|
const headers = {
|
||||||
|
...formData.getHeaders(),
|
||||||
|
'Content-Length': length,
|
||||||
|
};
|
||||||
|
delete headers['content-type'];
|
||||||
|
return axios.post(url, formData, {
|
||||||
|
headers,
|
||||||
|
})
|
||||||
|
.then(() => {
|
||||||
|
done(new Error('Request should not succeed without correct content-type'));
|
||||||
|
})
|
||||||
|
.catch(err => {
|
||||||
|
assert.ok(err.response, 'Error should be returned by axios');
|
||||||
|
|
||||||
|
xml2js.parseString(err.response.data, (err, result) => {
|
||||||
|
if (err) {
|
||||||
|
return done(err);
|
||||||
|
}
|
||||||
|
|
||||||
|
const error = result.Error;
|
||||||
|
assert.equal(error.Code[0], 'PreconditionFailed');
|
||||||
|
assert.equal(error.Message[0],
|
||||||
|
'Bucket POST must be of the enclosure-type multipart/form-data');
|
||||||
|
return done();
|
||||||
|
});
|
||||||
|
});
|
||||||
|
});
|
||||||
|
});
|
||||||
|
|
||||||
|
it('should upload an object with key slash', done => {
|
||||||
|
const { url } = testContext;
|
||||||
|
const slashKey = '/';
|
||||||
|
const fields = calculateFields(ak, sk, [{ key: slashKey }]);
|
||||||
|
|
||||||
|
const formData = new FormData();
|
||||||
|
|
||||||
|
fields.forEach(field => {
|
||||||
|
formData.append(field.name, field.value);
|
||||||
|
});
|
||||||
|
|
||||||
|
formData.append('file', fileBuffer, filename);
|
||||||
|
|
||||||
|
formData.getLength((err, length) => {
|
||||||
|
if (err) {
|
||||||
|
return done(err);
|
||||||
|
}
|
||||||
|
|
||||||
|
return axios.post(url, formData, {
|
||||||
|
headers: {
|
||||||
|
...formData.getHeaders(),
|
||||||
|
'Content-Length': length,
|
||||||
|
},
|
||||||
|
})
|
||||||
|
.then(response => {
|
||||||
|
assert.equal(response.status, 204);
|
||||||
|
done();
|
||||||
|
})
|
||||||
|
.catch(err => {
|
||||||
|
done(err);
|
||||||
|
});
|
||||||
|
});
|
||||||
|
});
|
||||||
|
|
||||||
|
it('should fail to upload an object with key length of 0', done => {
|
||||||
|
const { url } = testContext;
|
||||||
|
const fields = calculateFields(ak, sk, [
|
||||||
|
{ key: '' },
|
||||||
|
]);
|
||||||
|
|
||||||
|
const formData = new FormData();
|
||||||
|
|
||||||
|
fields.forEach(field => {
|
||||||
|
formData.append(field.name, field.value);
|
||||||
|
});
|
||||||
|
|
||||||
|
formData.append('file', fileBuffer, filename);
|
||||||
|
|
||||||
|
formData.getLength((err, length) => {
|
||||||
|
if (err) {
|
||||||
|
return done(err);
|
||||||
|
}
|
||||||
|
|
||||||
|
// Use an incorrect content length (e.g., actual length - 20)
|
||||||
|
|
||||||
|
return axios.post(url, formData, {
|
||||||
|
headers: {
|
||||||
|
...formData.getHeaders(),
|
||||||
|
'Content-Length': length,
|
||||||
|
},
|
||||||
|
})
|
||||||
|
.then(() => done(new Error('Request should have failed but succeeded')))
|
||||||
|
.catch(err => {
|
||||||
|
// Expecting an error response from the API
|
||||||
|
assert.equal(err.response.status, 400);
|
||||||
|
xml2js.parseString(err.response.data, (err, result) => {
|
||||||
|
if (err) {
|
||||||
|
return done(err);
|
||||||
|
}
|
||||||
|
|
||||||
|
const error = result.Error;
|
||||||
|
assert.equal(error.Code[0], 'InvalidArgument');
|
||||||
|
assert.equal(error.Message[0],
|
||||||
|
'User key must have a length greater than 0.');
|
||||||
|
return done();
|
||||||
|
});
|
||||||
|
});
|
||||||
|
});
|
||||||
|
});
|
||||||
|
|
||||||
|
it('should fail to upload an object with key longer than 1024 bytes', done => {
|
||||||
|
const { url } = testContext;
|
||||||
|
const fields = calculateFields(ak, sk, [
|
||||||
|
{ key: 'a'.repeat(1025) },
|
||||||
|
]);
|
||||||
|
|
||||||
|
const formData = new FormData();
|
||||||
|
|
||||||
|
fields.forEach(field => {
|
||||||
|
formData.append(field.name, field.value);
|
||||||
|
});
|
||||||
|
|
||||||
|
formData.append('file', fileBuffer, filename);
|
||||||
|
|
||||||
|
formData.getLength((err, length) => {
|
||||||
|
if (err) {
|
||||||
|
return done(err);
|
||||||
|
}
|
||||||
|
|
||||||
|
// Use an incorrect content length (e.g., actual length - 20)
|
||||||
|
|
||||||
|
return axios.post(url, formData, {
|
||||||
|
headers: {
|
||||||
|
...formData.getHeaders(),
|
||||||
|
'Content-Length': length,
|
||||||
|
},
|
||||||
|
})
|
||||||
|
.then(() => {
|
||||||
|
// The request should fail, so we shouldn't get here
|
||||||
|
done(new Error('Request should have failed but succeeded'));
|
||||||
|
})
|
||||||
|
.catch(err => {
|
||||||
|
// Expecting an error response from the API
|
||||||
|
assert.equal(err.response.status, 400);
|
||||||
|
xml2js.parseString(err.response.data, (err, result) => {
|
||||||
|
if (err) {
|
||||||
|
return done(err);
|
||||||
|
}
|
||||||
|
|
||||||
|
const error = result.Error;
|
||||||
|
assert.equal(error.Code[0], 'KeyTooLongError');
|
||||||
|
assert.equal(error.Message[0],
|
||||||
|
'Your key is too long.');
|
||||||
|
return done();
|
||||||
|
});
|
||||||
|
});
|
||||||
|
});
|
||||||
|
});
|
||||||
|
|
||||||
|
it('should replace ${filename} variable in key with the name of the uploaded file', done => {
|
||||||
|
const { url } = testContext;
|
||||||
|
const keyTemplate = 'uploads/test/${filename}';
|
||||||
|
const fileToUpload = keyTemplate.replace('${filename}', filename);
|
||||||
|
const fields = calculateFields(ak, sk, [{ key: fileToUpload }]);
|
||||||
|
const formData = new FormData();
|
||||||
|
|
||||||
|
fields.forEach(field => {
|
||||||
|
const value = field.name === 'key' ? keyTemplate : field.value;
|
||||||
|
formData.append(field.name, value);
|
||||||
|
});
|
||||||
|
|
||||||
|
formData.append('file', fileBuffer, filename);
|
||||||
|
|
||||||
|
formData.getLength((err, length) => {
|
||||||
|
if (err) return done(err);
|
||||||
|
|
||||||
|
return axios.post(url, formData, {
|
||||||
|
headers: {
|
||||||
|
...formData.getHeaders(),
|
||||||
|
'Content-Length': length,
|
||||||
|
},
|
||||||
|
})
|
||||||
|
.then(response => {
|
||||||
|
assert.equal(response.status, 204);
|
||||||
|
const expectedKey = keyTemplate.replace('${filename}', filename);
|
||||||
|
|
||||||
|
const listParams = { Bucket: bucketName, Prefix: expectedKey };
|
||||||
|
return s3.listObjects(listParams, (err, data) => {
|
||||||
|
if (err) return done(err);
|
||||||
|
const objectExists = data.Contents.some(item => item.Key === expectedKey);
|
||||||
|
assert(objectExists, 'Object was not uploaded with the expected key');
|
||||||
|
return done();
|
||||||
|
});
|
||||||
|
})
|
||||||
|
.catch(done);
|
||||||
|
});
|
||||||
|
});
|
||||||
|
|
||||||
|
it('should fail to upload an object with an invalid multipart boundary', done => {
|
||||||
|
const { url } = testContext;
|
||||||
|
const fields = calculateFields(ak, sk);
|
||||||
|
|
||||||
|
const formData = new FormData();
|
||||||
|
|
||||||
|
fields.forEach(field => {
|
||||||
|
formData.append(field.name, field.value);
|
||||||
|
});
|
||||||
|
|
||||||
|
formData.append('file', fileBuffer, filename);
|
||||||
|
|
||||||
|
// Generate the form data with a valid boundary
|
||||||
|
const validBoundary = formData.getBoundary();
|
||||||
|
|
||||||
|
// Manually create the payload with an invalid boundary
|
||||||
|
const invalidBoundary = '----InvalidBoundary';
|
||||||
|
const payload = Buffer.concat([
|
||||||
|
Buffer.from(`--${invalidBoundary}\r\n`),
|
||||||
|
Buffer.from(`Content-Disposition: form-data; name="key"\r\n\r\n${filename}\r\n`),
|
||||||
|
Buffer.from(`--${invalidBoundary}\r\n`),
|
||||||
|
Buffer.from(`Content-Disposition: form-data; name="file"; filename="${filename}"\r\n`),
|
||||||
|
Buffer.from('Content-Type: application/octet-stream\r\n\r\n'),
|
||||||
|
fileBuffer,
|
||||||
|
Buffer.from(`\r\n--${invalidBoundary}--\r\n`),
|
||||||
|
]);
|
||||||
|
|
||||||
|
// Create an axios instance with invalid headers
|
||||||
|
axios.post(url, payload, {
|
||||||
|
headers: {
|
||||||
|
'Content-Type': `multipart/form-data; boundary=${validBoundary}`,
|
||||||
|
'Content-Length': payload.length,
|
||||||
|
},
|
||||||
|
})
|
||||||
|
.then(() => {
|
||||||
|
// The request should fail, so we shouldn't get here
|
||||||
|
done(new Error('Request should have failed but succeeded'));
|
||||||
|
})
|
||||||
|
.catch(err => {
|
||||||
|
// Expecting an error response from the API
|
||||||
|
assert.equal(err.response.status, 400);
|
||||||
|
done();
|
||||||
|
});
|
||||||
|
});
|
||||||
|
|
||||||
|
it('should fail to upload an object with an too small content length header', done => {
|
||||||
|
const { url } = testContext;
|
||||||
|
const fields = calculateFields(ak, sk);
|
||||||
|
|
||||||
|
const formData = new FormData();
|
||||||
|
|
||||||
|
fields.forEach(field => {
|
||||||
|
formData.append(field.name, field.value);
|
||||||
|
});
|
||||||
|
|
||||||
|
formData.append('file', fileBuffer, filename);
|
||||||
|
|
||||||
|
formData.getLength((err, length) => {
|
||||||
|
if (err) {
|
||||||
|
return done(err);
|
||||||
|
}
|
||||||
|
|
||||||
|
// Use an incorrect content length (e.g., actual length - 20)
|
||||||
|
const incorrectLength = length - 20;
|
||||||
|
|
||||||
|
return axios.post(url, formData, {
|
||||||
|
headers: {
|
||||||
|
...formData.getHeaders(),
|
||||||
|
'Content-Length': incorrectLength,
|
||||||
|
},
|
||||||
|
})
|
||||||
|
.then(() => done(new Error('Request should have failed but succeeded')))
|
||||||
|
.catch(err => {
|
||||||
|
// Expecting an error response from the API
|
||||||
|
assert.equal(err.response.status, 400);
|
||||||
|
done();
|
||||||
|
});
|
||||||
|
});
|
||||||
|
});
|
||||||
|
|
||||||
|
    it('should return an error if form data (excluding file) exceeds 20KB', done => {
        const { url } = testContext;
        const fields = calculateFields(ak, sk);

        // Add additional fields to make form data exceed 20KB
        const largeValue = 'A'.repeat(1024); // 1KB value
        for (let i = 0; i < 21; i++) { // Add 21 fields of 1KB each to exceed 20KB
            fields.push({ name: `field${i}`, value: largeValue });
        }

        const formData = new FormData();

        fields.forEach(field => {
            formData.append(field.name, field.value);
        });

        formData.append('file', fileBuffer, filename);

        return formData.getLength((err, length) => {
            if (err) {
                return done(err);
            }

            return axios.post(url, formData, {
                headers: {
                    ...formData.getHeaders(),
                    'Content-Length': length,
                },
            })
                .then(() => {
                    done(new Error('Request should not succeed with form data exceeding 20KB'));
                })
                .catch(err => {
                    assert.ok(err.response, 'Error should be returned by axios');

                    xml2js.parseString(err.response.data, (err, result) => {
                        if (err) {
                            return done(err);
                        }

                        const error = result.Error;
                        assert.equal(error.Code[0], 'MaxPostPreDataLengthExceeded');
                        assert.equal(error.Message[0],
                            'Your POST request fields preceeding the upload file was too large.');
                        return done();
                    });
                });
        });
    });

    it('should successfully upload an object with bucket versioning enabled and verify version ID', done => {
        const { url } = testContext;

        // Enable versioning on the bucket
        const versioningParams = {
            Bucket: bucketName,
            VersioningConfiguration: {
                Status: 'Enabled',
            },
        };

        return s3.putBucketVersioning(versioningParams, (err) => {
            if (err) {
                return done(err);
            }

            const fields = calculateFields(ak, sk, [{ bucket: bucketName }]);
            const formData = new FormData();

            fields.forEach(field => {
                formData.append(field.name, field.value);
            });

            formData.append('file', fileBuffer, filename);

            return formData.getLength((err, length) => {
                if (err) {
                    return done(err);
                }

                return axios.post(url, formData, {
                    headers: {
                        ...formData.getHeaders(),
                        'Content-Length': length,
                    },
                })
                    .then(response => {
                        assert.equal(response.status, 204);

                        // Verify version ID is present in the response
                        const versionId = response.headers['x-amz-version-id'];
                        assert.ok(versionId, 'Version ID should be present in the response headers');
                        done();
                    })
                    .catch(err => {
                        done(err);
                    });
            });
        });
    });
});
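
For reference, the happy path these failure cases deviate from looks roughly like the sketch below; as in the tests above, it assumes `calculateFields(ak, sk)` is the test helper returning the signed POST policy fields as `{ name, value }` pairs and that `url` points at the bucket endpoint:

    const formData = new FormData();
    calculateFields(ak, sk).forEach(field => formData.append(field.name, field.value));
    formData.append('file', fileBuffer, filename);
    formData.getLength((err, length) => {
        assert.ifError(err);
        axios.post(url, formData, {
            headers: { ...formData.getHeaders(), 'Content-Length': length },
        }).then(response => assert.equal(response.status, 204)); // S3 answers 204 on success
    });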

@@ -0,0 +1,433 @@
const assert = require('assert');
const async = require('async');
const moment = require('moment');
const { errors } = require('arsenal');
const sinon = require('sinon');

const { bucketPut } = require('../../../lib/api/bucketPut');
const bucketPutObjectLock = require('../../../lib/api/bucketPutObjectLock');
const bucketPutVersioning = require('../../../lib/api/bucketPutVersioning');
const { cleanup, DummyRequestLogger, makeAuthInfo, versioningTestUtils }
    = require('../helpers');
const { ds } = require('arsenal').storage.data.inMemory.datastore;
const metadata = require('../metadataswitch');
const objectPost = require('../../../lib/api/objectPost');
const { objectLockTestUtils } = require('../helpers');
const DummyRequest = require('../DummyRequest');
const mpuUtils = require('../utils/mpuUtils');

const any = sinon.match.any;

const log = new DummyRequestLogger();
const canonicalID = 'accessKey1';
const authInfo = makeAuthInfo(canonicalID);
const bucketName = 'bucketname123';
const postBody = Buffer.from('I am a body', 'utf8');
const correctMD5 = 'be747eb4b75517bf6b3cf7c5fbb62f3a';
const mockDate = new Date(2050, 10, 12);
const testPutBucketRequest = new DummyRequest({
    bucketName,
    headers: { host: `${bucketName}.s3.amazonaws.com` },
    url: '/',
});
const testPutBucketRequestLock = new DummyRequest({
    bucketName,
    headers: {
        'host': `${bucketName}.s3.amazonaws.com`,
        'x-amz-bucket-object-lock-enabled': 'true',
    },
    url: '/',
});

const originalputObjectMD = metadata.putObjectMD;
const objectName = 'objectName';

let testPostObjectRequest;
const enableVersioningRequest =
    versioningTestUtils.createBucketPutVersioningReq(bucketName, 'Enabled');
const suspendVersioningRequest =
    versioningTestUtils.createBucketPutVersioningReq(bucketName, 'Suspended');

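// Note on the digest constants above: correctMD5 is the hex MD5 of postBody,
// and the 'vnR+tLdVF79rPPfF+7YvOg==' calculatedHash set on requests below is
// the base64 encoding of that same digest, as Node's crypto module shows:
//
//     const crypto = require('crypto');
//     crypto.createHash('md5').update('I am a body').digest('hex');
//     // => 'be747eb4b75517bf6b3cf7c5fbb62f3a' (correctMD5)
//     crypto.createHash('md5').update('I am a body').digest('base64');
//     // => 'vnR+tLdVF79rPPfF+7YvOg==' (calculatedHash)
//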
describe('objectPost API', () => {
    beforeEach(() => {
        cleanup();
        sinon.spy(metadata, 'putObjectMD');
        testPostObjectRequest = new DummyRequest({
            bucketName,
            formData: {
                key: objectName,
            },
            fileEventData: {},
            headers: { host: `${bucketName}.s3.amazonaws.com` },
            url: '/',
        }, postBody);
    });

    afterEach(() => {
        sinon.restore();
        metadata.putObjectMD = originalputObjectMD;
    });

    it('should return an error if the bucket does not exist', done => {
        objectPost(authInfo, testPostObjectRequest, undefined, log, err => {
            assert.deepStrictEqual(err, errors.NoSuchBucket);
            done();
        });
    });

    it('should successfully post an object', done => {
        const testPostObjectRequest = new DummyRequest({
            bucketName,
            formData: {
                key: objectName,
            },
            fileEventData: {},
            headers: {},
            url: '/',
            calculatedHash: 'vnR+tLdVF79rPPfF+7YvOg==',
        }, postBody);

        bucketPut(authInfo, testPutBucketRequest, log, () => {
            objectPost(authInfo, testPostObjectRequest, undefined, log,
                (err, resHeaders) => {
                    assert.strictEqual(resHeaders.ETag, `"${correctMD5}"`);
                    metadata.getObjectMD(bucketName, objectName,
                        {}, log, (err, md) => {
                            assert(md);
                            assert.strictEqual(md['content-md5'], correctMD5);
                            done();
                        });
                });
        });
    });

    const mockModes = ['GOVERNANCE', 'COMPLIANCE'];
    mockModes.forEach(mockMode => {
        it(`should post an object with valid date & ${mockMode} mode`, done => {
            const testPostObjectRequest = new DummyRequest({
                bucketName,
                formData: {
                    key: objectName,
                },
                fileEventData: {},
                headers: {
                    'x-amz-object-lock-retain-until-date': mockDate,
                    'x-amz-object-lock-mode': mockMode,
                },
                url: '/',
                calculatedHash: 'vnR+tLdVF79rPPfF+7YvOg==',
            }, postBody);
            bucketPut(authInfo, testPutBucketRequestLock, log, () => {
                objectPost(authInfo, testPostObjectRequest, undefined, log,
                    (err, headers) => {
                        assert.ifError(err);
                        assert.strictEqual(headers.ETag, `"${correctMD5}"`);
                        metadata.getObjectMD(bucketName, objectName, {}, log,
                            (err, md) => {
                                const mode = md.retentionMode;
                                const retainUntilDate = md.retentionDate;
                                assert.ifError(err);
                                assert(md);
                                assert.strictEqual(mode, mockMode);
                                assert.strictEqual(retainUntilDate, mockDate);
                                done();
                            });
                    });
            });
        });
    });

    const formatTime = time => time.slice(0, 20);

    const testObjectLockConfigs = [
        {
            testMode: 'COMPLIANCE',
            val: 30,
            type: 'Days',
        },
        {
            testMode: 'GOVERNANCE',
            val: 5,
            type: 'Years',
        },
    ];
    testObjectLockConfigs.forEach(config => {
        const { testMode, type, val } = config;
        it('should post an object with default retention if the object does ' +
            'not have a retention configuration but the bucket does', done => {
            const testPostObjectRequest = new DummyRequest({
                bucketName,
                formData: {
                    key: objectName,
                },
                fileEventData: {},
                headers: {},
                url: '/',
                calculatedHash: 'vnR+tLdVF79rPPfF+7YvOg==',
            }, postBody);

            const testObjLockRequest = {
                bucketName,
                headers: { host: `${bucketName}.s3.amazonaws.com` },
                post: objectLockTestUtils.generateXml(testMode, val, type),
            };

            bucketPut(authInfo, testPutBucketRequestLock, log, () => {
                bucketPutObjectLock(authInfo, testObjLockRequest, log, () => {
                    objectPost(authInfo, testPostObjectRequest, undefined, log,
                        (err, headers) => {
                            assert.ifError(err);
                            assert.strictEqual(headers.ETag, `"${correctMD5}"`);
                            metadata.getObjectMD(bucketName, objectName, {},
                                log, (err, md) => {
                                    const mode = md.retentionMode;
                                    const retainDate = md.retentionDate;
                                    const date = moment();
                                    const days = type === 'Days' ? val : val * 365;
                                    const expectedDate = date.add(days, 'days');
                                    assert.ifError(err);
                                    assert.strictEqual(mode, testMode);
                                    assert.strictEqual(formatTime(retainDate),
                                        formatTime(expectedDate.toISOString()));
                                    done();
                                });
                        });
                });
            });
        });
    });

    it('should successfully post an object with legal hold ON', done => {
        const request = new DummyRequest({
            bucketName,
            formData: {
                key: objectName,
            },
            fileEventData: {},
            headers: {
                'x-amz-object-lock-legal-hold': 'ON',
            },
            url: '/',
            calculatedHash: 'vnR+tLdVF79rPPfF+7YvOg==',
        }, postBody);

        bucketPut(authInfo, testPutBucketRequestLock, log, () => {
            objectPost(authInfo, request, undefined, log, (err, headers) => {
                assert.ifError(err);
                assert.strictEqual(headers.ETag, `"${correctMD5}"`);
                metadata.getObjectMD(bucketName, objectName, {}, log,
                    (err, md) => {
                        assert.ifError(err);
                        assert.strictEqual(md.legalHold, true);
                        done();
                    });
            });
        });
    });

    it('should successfully post an object with legal hold OFF', done => {
        const request = new DummyRequest({
            bucketName,
            formData: {
                key: objectName,
            },
            fileEventData: {},
            headers: {
                'x-amz-object-lock-legal-hold': 'OFF',
            },
            url: '/',
            calculatedHash: 'vnR+tLdVF79rPPfF+7YvOg==',
        }, postBody);

        bucketPut(authInfo, testPutBucketRequestLock, log, () => {
            objectPost(authInfo, request, undefined, log, (err, headers) => {
                assert.ifError(err);
                assert.strictEqual(headers.ETag, `"${correctMD5}"`);
                metadata.getObjectMD(bucketName, objectName, {}, log,
                    (err, md) => {
                        assert.ifError(err);
                        assert(md);
                        assert.strictEqual(md.legalHold, false);
                        done();
                    });
            });
        });
    });

    it('should not leave orphans in data when overwriting an object', done => {
        const testPostObjectRequest2 = new DummyRequest({
            bucketName,
            formData: {
                key: objectName,
            },
            fileEventData: {},
            headers: {},
            url: '/',
        }, Buffer.from('I am another body', 'utf8'));

        bucketPut(authInfo, testPutBucketRequest, log, () => {
            objectPost(authInfo, testPostObjectRequest,
                undefined, log, () => {
                    objectPost(authInfo, testPostObjectRequest2, undefined,
                        log, () => {
                            // orphan objects don't get deleted until the
                            // next tick in memory
                            setImmediate(() => {
                                // Data store starts at index 1
                                assert.strictEqual(ds[0], undefined);
                                assert.strictEqual(ds[1], undefined);
                                assert.deepStrictEqual(ds[2].value,
                                    Buffer.from('I am another body', 'utf8'));
                                done();
                            });
                        });
                });
        });
    });

    it('should not leave orphans in data when overwriting a multipart upload object', done => {
        bucketPut(authInfo, testPutBucketRequest, log, () => {
            mpuUtils.createMPU('default', bucketName, objectName, log,
                (err, testUploadId) => {
                    objectPost(authInfo, testPostObjectRequest, undefined, log, err => {
                        assert.ifError(err);
                        sinon.assert.calledWith(metadata.putObjectMD,
                            any, any, any, sinon.match({ oldReplayId: testUploadId }), any, any);
                        done();
                    });
                });
        });
    });

    describe('objectPost API with versioning', () => {
        beforeEach(() => {
            cleanup();
        });

        const objData = ['foo0', 'foo1', 'foo2'].map(str =>
            Buffer.from(str, 'utf8'));
        const testPostObjectRequests = objData.map(data => versioningTestUtils
            .createPostObjectRequest(bucketName, objectName, data));

        it('should delete the latest version when creating a new null version ' +
            'if the latest version is a null version', done => {
            async.series([
                callback => bucketPut(authInfo, testPutBucketRequest, log,
                    callback),
                // put a null version by putting the object before
                // versioning is configured
                callback => objectPost(authInfo, testPostObjectRequests[0], undefined,
                    log, err => {
                        versioningTestUtils.assertDataStoreValues(ds, [objData[0]]);
                        callback(err);
                    }),
                callback => bucketPutVersioning(authInfo, suspendVersioningRequest,
                    log, callback),
                // create a new null version by putting the object after
                // versioning is suspended
                callback => objectPost(authInfo, testPostObjectRequests[1],
                    undefined, log, err => {
                        // wait until the next tick since the mem backend
                        // executes deletes in the next tick
                        setImmediate(() => {
                            // the old null version should be deleted
                            versioningTestUtils.assertDataStoreValues(ds,
                                [undefined, objData[1]]);
                            callback(err);
                        });
                    }),
                // create another null version
                callback => objectPost(authInfo, testPostObjectRequests[2],
                    undefined, log, err => {
                        setImmediate(() => {
                            // the old null version should be deleted
                            versioningTestUtils.assertDataStoreValues(ds,
                                [undefined, undefined, objData[2]]);
                            callback(err);
                        });
                    }),
            ], done);
        });

        describe('when null version is not the latest version', () => {
            const objData = ['foo0', 'foo1', 'foo2'].map(str =>
                Buffer.from(str, 'utf8'));
            const testPostObjectRequests = objData.map(data => versioningTestUtils
                .createPostObjectRequest(bucketName, objectName, data));
            beforeEach(done => {
                async.series([
                    callback => bucketPut(authInfo, testPutBucketRequest, log,
                        callback),
                    // put a null version: put the object before versioning
                    // is configured
                    callback => objectPost(authInfo, testPostObjectRequests[0],
                        undefined, log, callback),
                    callback => bucketPutVersioning(authInfo,
                        enableVersioningRequest, log, callback),
                    // put another version
                    callback => objectPost(authInfo, testPostObjectRequests[1],
                        undefined, log, callback),
                    callback => bucketPutVersioning(authInfo,
                        suspendVersioningRequest, log, callback),
                ], err => {
                    if (err) {
                        return done(err);
                    }
                    versioningTestUtils.assertDataStoreValues(ds,
                        objData.slice(0, 2));
                    return done();
                });
            });

            it('should still delete the null version when creating a new null version',
                done => {
                    objectPost(authInfo, testPostObjectRequests[2], undefined,
                        log, err => {
                            assert.ifError(err, `Unexpected err: ${err}`);
                            setImmediate(() => {
                                // the old null version should be deleted after
                                // putting the new null version
                                versioningTestUtils.assertDataStoreValues(ds,
                                    [undefined, objData[1], objData[2]]);
                                done(err);
                            });
                        });
                });
        });

        it('should return BadDigest error and not leave orphans in data when ' +
            'contentMD5 and completedHash do not match', done => {
            const testPostObjectRequest = new DummyRequest({
                bucketName,
                formData: {
                    key: objectName,
                },
                fileEventData: {},
                headers: {},
                url: '/',
                contentMD5: 'vnR+tLdVF79rPPfF+7YvOg==',
            }, Buffer.from('I am another body', 'utf8'));

            bucketPut(authInfo, testPutBucketRequest, log, () => {
                objectPost(authInfo, testPostObjectRequest, undefined, log,
                    err => {
                        assert.deepStrictEqual(err, errors.BadDigest);
                        // orphan objects don't get deleted until the
                        // next tick in memory
                        setImmediate(() => {
                            // Data store starts at index 1
                            assert.strictEqual(ds[0], undefined);
                            assert.strictEqual(ds[1], undefined);
                            done();
                        });
                    });
            });
        });
    });
});
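
From its call sites in these tests, the new handler appears to share the shape of the other object APIs; a sketch of the signature as exercised above (the third argument is always `undefined` here, and naming it after objectPut's streaming V4 parameters is an assumption):

    // lib/api/objectPost.js -- signature as exercised by the tests above
    function objectPost(authInfo, request, streamingV4Params, log, callback) {
        // request.formData.key carries the object key and the request body
        // carries the file; callback receives (err, responseHeaders), where
        // responseHeaders.ETag is the quoted MD5 of the stored bytes
    }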

@@ -374,6 +374,19 @@ const versioningTestUtils = {
         };
         return new DummyRequest(params, body);
     },
+    createPostObjectRequest: (bucketName, keyName, body) => {
+        const params = {
+            bucketName,
+            formData: {
+                bucket: bucketName,
+                key: keyName,
+            },
+            fileEventData: {},
+            headers: {},
+            url: '/',
+        };
+        return new DummyRequest(params, body);
+    },
     createBucketPutVersioningReq: (bucketName, status) => {
         const request = {
             bucketName,

@@ -16,6 +16,11 @@
     enabled "2.0.x"
     kuler "^2.0.0"
 
+"@fastify/busboy@^2.1.1":
+  version "2.1.1"
+  resolved "https://registry.yarnpkg.com/@fastify/busboy/-/busboy-2.1.1.tgz#b9da6a878a371829a0502c9b6c1c143ef6663f4d"
+  integrity sha512-vBZP4NlzfOlerQTnba4aqZoMhE/a9HY7HRqoOPaETQcSQuWEIyZMHGfVu6w9wGtGK5fED5qRs2DteVCjOH60sA==
+
 "@gar/promisify@^1.0.1":
   version "1.1.3"
   resolved "https://registry.yarnpkg.com/@gar/promisify/-/promisify-1.1.3.tgz#555193ab2e3bb3b6adc3d551c9c030d9e860daf6"

@@ -499,9 +504,9 @@ arraybuffer.slice@~0.0.7:
   optionalDependencies:
     ioctl "^2.0.2"
 
-"arsenal@git+https://github.com/scality/arsenal#7.70.29":
+"arsenal@git+https://github.com/scality/arsenal#61984fbac3721d72cfc05a0cba0a0965c158008d":
   version "7.70.29"
-  resolved "git+https://github.com/scality/arsenal#a643a3e6ccbc49327339a285de1d4cb17afcd171"
+  resolved "git+https://github.com/scality/arsenal#61984fbac3721d72cfc05a0cba0a0965c158008d"
   dependencies:
     "@js-sdsl/ordered-set" "^4.4.2"
     "@types/async" "^3.2.12"