Compare commits
15 Commits
developmen ... improvemen
Author | SHA1 | Date
---|---|---
Will Toozs | e86e23a3e7 |
Will Toozs | ae17d9a8f1 |
Will Toozs | 7db8da210d |
Will Toozs | 15f1472f25 |
Will Toozs | 0b3e802081 |
Will Toozs | a83edc357c |
Will Toozs | c95aa64056 |
Will Toozs | 7bd862393a |
Will Toozs | 77a4fa0334 |
Will Toozs | aadef76ceb |
Will Toozs | 77c69cccfc |
Will Toozs | 587f5bfa89 |
Will Toozs | 4913ab044f |
Will Toozs | fd6298dd63 |
Will Toozs | 65d46c661f |
270  lib/api/api.js
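For orientation, the request shape this PR adds support for is the standard S3 POST Object (presigned-POST) upload: a multipart/form-data body carrying the v4 policy and signature fields, plus the file part. A hypothetical client-side sketch, not part of this PR — the 'form-data' package, endpoint, and credential placeholders are assumptions; the field names are the AWS presigned-POST convention that the diff below validates:

// Hypothetical client sketch of a presigned-POST upload against a local server.
const http = require('http');
const FormData = require('form-data'); // assumed dev dependency for this sketch

const policy = Buffer.from(JSON.stringify({
    expiration: '2030-01-01T00:00:00Z',
    conditions: [{ bucket: 'test-bucket' }, ['starts-with', '$key', 'uploads/']],
})).toString('base64');

const form = new FormData();
form.append('key', 'uploads/test.txt');
form.append('X-Amz-Algorithm', 'AWS4-HMAC-SHA256');
form.append('X-Amz-Credential', '<accessKey>/20300101/us-east-1/s3/aws4_request');
form.append('X-Amz-Date', '20300101T000000Z');
form.append('Policy', policy);                          // base64 policy document
form.append('X-Amz-Signature', '<hex signature over the policy>');
form.append('file', Buffer.from('hello'), 'test.txt');  // file part comes last

const req = http.request({
    method: 'POST', host: 'localhost', port: 8000, path: '/test-bucket',
    headers: form.getHeaders(),
}, res => console.log('status:', res.statusCode));
form.pipe(req);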
@@ -52,6 +52,7 @@ const objectGetRetention = require('./objectGetRetention');
const objectGetTagging = require('./objectGetTagging');
const objectHead = require('./objectHead');
const objectPut = require('./objectPut');
const objectPost = require('./objectPost');
const objectPutACL = require('./objectPutACL');
const objectPutLegalHold = require('./objectPutLegalHold');
const objectPutTagging = require('./objectPutTagging');
@@ -68,11 +69,128 @@ const validateQueryAndHeaders = require('../utilities/validateQueryAndHeaders');
const parseCopySource = require('./apiUtils/object/parseCopySource');
const { tagConditionKeyAuth } = require('./apiUtils/authorization/tagConditionKeys');
const checkHttpHeadersSize = require('./apiUtils/object/checkHttpHeadersSize');
const { decryptToken } = require('./apiUtils/object/continueToken');
const busboy = require('busboy');
const fs = require('fs');
const path = require('path');
const os = require('os');
const { PassThrough } = require('stream');

const monitoringMap = policies.actionMaps.actionMonitoringMapS3;

auth.setHandler(vault);

// Hand-rolled multipart/form-data parser; note that the busboy-based path in
// callApiMethod below is the one currently wired up (the call to this
// function there is commented out).
function parseMultipartFormData(request, callback) {
    let algoOK = false;
    let credOK = false;
    let dateOK = false;
    let sigOK = false;
    let policyOK = false;
    request.formData = {};

    const boundary = request.headers['content-type'].split('boundary=')[1];
    const boundaryBuffer = Buffer.from(`--${boundary}`);
    const newlineBuffer = Buffer.from('\r\n');

    let buffer = Buffer.alloc(0);
    let currentField = null;
    let file = null;
    let count = 0;

    request.on('data', (chunk) => {
        buffer = Buffer.concat([buffer, chunk]);

        let boundaryIndex;
        console.log('part count:', count++);

        while ((boundaryIndex = buffer.indexOf(boundaryBuffer)) !== -1) {
            let part = buffer.slice(0, boundaryIndex);
            buffer = buffer.slice(boundaryIndex + boundaryBuffer.length);

            if (part.length === 0) continue; // skip empty parts

            // strip the part's leading CRLF before splitting headers
            // from content
            if (part.indexOf(newlineBuffer) === 0) {
                part = part.slice(newlineBuffer.length);
            }
            const partToProcess = part;

            const headersEndIndex =
                partToProcess.indexOf(Buffer.concat([newlineBuffer, newlineBuffer]));
            const headers = partToProcess.slice(0, headersEndIndex).toString().split('\r\n');
            let content = partToProcess.slice(headersEndIndex + newlineBuffer.length * 2);
            // Remove the trailing CRLF from the content
            if (content.slice(-2).equals(newlineBuffer)) {
                content = content.slice(0, -2);
            }

            const contentDisposition = headers.find(header => header.startsWith('Content-Disposition'));
            const contentTypeHeader = headers.find(header => header.startsWith('Content-Type'));
            const mimetype = contentTypeHeader ? contentTypeHeader.split(': ')[1] : '';

            if (contentDisposition) {
                const nameMatch = contentDisposition.match(/name="([^"]+)"/);
                const filenameMatch = contentDisposition.match(/filename="([^"]+)"/);

                if (nameMatch) {
                    const fieldname = nameMatch[1];
                    if (filenameMatch) {
                        // File field, e.g.
                        // 'Content-Disposition: form-data; name="file"; filename="test.txt"'
                        const filename = filenameMatch[1];
                        // const mimetype = headers.find(header => header.startsWith('Content-Type')).split(': ')[1];

                        file = new PassThrough();
                        file.write(content);

                        // Pipe the remaining data
                        request.pipe(file);
                        // request.pipe(fileStream);

                        if (algoOK && credOK && dateOK && sigOK && policyOK) {
                            callback(null, { file, fieldname, filename, boundaryBuffer, mimetype });
                        }

                        currentField = null;
                    } else {
                        // Regular field
                        currentField = fieldname;
                        request.formData[fieldname] = content.toString();

                        if (fieldname === 'X-Amz-Algorithm') {
                            algoOK = true;
                        }
                        if (fieldname === 'X-Amz-Credential') {
                            credOK = true;
                        }
                        if (fieldname === 'X-Amz-Date') {
                            dateOK = true;
                        }
                        if (fieldname === 'X-Amz-Signature') {
                            sigOK = true;
                        }
                        if (fieldname === 'Policy') {
                            const decrypted = decryptToken(request.formData.Policy);
                            request.formData.decryptedPolicy = JSON.parse(decrypted);
                            policyOK = true;
                        }

                        currentField = null;
                    }
                }
            }
        }
    });

    request.on('end', () => {
        if (!algoOK || !credOK || !dateOK || !sigOK || !policyOK) {
            callback(new Error('InvalidRequest'));
        }
    });
}

/* eslint-disable no-param-reassign */
const api = {
    callApiMethod(apiMethod, request, response, log, callback) {
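The parser above accumulates the request into a buffer and splits it on the boundary with Buffer.indexOf. A standalone sketch of just that splitting step, illustrative only and not the PR's code:

// Illustration of the Buffer.indexOf boundary-splitting technique used above,
// run over a canned multipart body with a made-up boundary.
const body = Buffer.from(
    '--XBOUND\r\n' +
    'Content-Disposition: form-data; name="key"\r\n\r\n' +
    'uploads/test.txt\r\n' +
    '--XBOUND\r\n' +
    'Content-Disposition: form-data; name="file"; filename="test.txt"\r\n' +
    'Content-Type: text/plain\r\n\r\n' +
    'hello\r\n' +
    '--XBOUND--\r\n');
const boundary = Buffer.from('--XBOUND');
let rest = body;
let idx;
while ((idx = rest.indexOf(boundary)) !== -1) {
    const part = rest.slice(0, idx);
    rest = rest.slice(idx + boundary.length);
    if (part.length === 0) continue; // nothing before the first boundary
    const headerEnd = part.indexOf('\r\n\r\n');
    if (headerEnd === -1) continue;  // guard against malformed parts
    console.log('headers:', part.slice(0, headerEnd).toString().trim());
    console.log('content:', part.slice(headerEnd + 4, -2).toString()); // drop trailing CRLF
}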
@@ -112,7 +230,7 @@ const api = {

        // no need to check auth on website or cors preflight requests
        if (apiMethod === 'websiteGet' || apiMethod === 'websiteHead' ||
            apiMethod === 'corsPreflight') {
            request.actionImplicitDenies = false;
            return this[apiMethod](request, log, callback);
        }
@@ -158,7 +276,7 @@ const api = {
                // second item checks s3:GetObject(Version)Tagging action
                if (!authResults[1].isAllowed) {
                    log.trace('get tagging authorization denial ' +
                        'from Vault');
                    returnTagCount = false;
                }
            } else {
@@ -184,8 +302,108 @@ const api = {
            }
            return { returnTagCount, isImplicitDeny };
        }
        let bb;
        let fileEventData = null;

        if (apiMethod === 'objectPost' && request.headers['content-type'].includes('multipart/form-data')) {
            bb = busboy({ headers: request.headers });
        }

        return async.waterfall([
            next => {
                if (apiMethod === 'objectPut' || apiMethod === 'objectPutPart') {
                    return next(null);
                }
                if (apiMethod === 'objectPost' && request.headers['content-type'].includes('multipart/form-data')) {
                    writeContinue(request, response);

                    let algoOK = false;
                    let credOK = false;
                    let dateOK = false;
                    let sigOK = false;
                    let policyOK = false;
                    request.formData = {};
                    bb.on('field', (fieldname, val) => {
                        request.formData[fieldname] = val;
                        if (request.formData.Policy) {
                            const decrypted = decryptToken(request.formData.Policy);
                            request.formData.decryptedPolicy = JSON.parse(decrypted);
                        }

                        // TODO - put content type field for file in request
                        if (fieldname === 'X-Amz-Algorithm') {
                            algoOK = true;
                        }
                        if (fieldname === 'X-Amz-Credential') {
                            credOK = true;
                        }
                        if (fieldname === 'X-Amz-Date') {
                            dateOK = true;
                        }
                        if (fieldname === 'X-Amz-Signature') {
                            sigOK = true;
                        }
                        if (fieldname === 'Policy') {
                            policyOK = true;
                        }
                    });

                    // busboy v1.x emits (name, stream, info); unpack info to
                    // keep the downstream fileEventData shape unchanged
                    bb.on('file', (fieldname, file, info) => {
                        const { filename, encoding, mimeType: mimetype } = info;
                        fileEventData = { fieldname, file, filename, encoding, mimetype };
                        if (algoOK && credOK && dateOK && sigOK && policyOK) {
                            return next(null);
                        }
                    });

                    bb.on('finish', () => {
                        // if an authorization field is missing, return an error
                        if (!algoOK || !credOK || !dateOK || !sigOK || !policyOK) {
                            return next(errors.InvalidRequest);
                        }
                    });
                    request.pipe(bb);

                    // parseMultipartFormData(request, (err, data) => {
                    //     if (err) {
                    //         return next(err);
                    //     }
                    //     fileEventData = data;
                    //     return next(null);
                    // });
                } else {
                    // issue 100 Continue to the client
                    writeContinue(request, response);
                    const MAX_POST_LENGTH = request.method === 'POST' ?
                        1024 * 1024 : 1024 * 1024 / 2; // 1 MB or 512 KB
                    const post = [];
                    let postLength = 0;
                    request.on('data', chunk => {
                        postLength += chunk.length;
                        // Sanity check on post length
                        if (postLength <= MAX_POST_LENGTH) {
                            post.push(chunk);
                        }
                    });

                    request.on('error', err => {
                        log.trace('error receiving request', {
                            error: err,
                        });
                        return next(errors.InternalError);
                    });

                    request.on('end', () => {
                        if (postLength > MAX_POST_LENGTH) {
                            log.error('body length is too long for request type',
                                { postLength });
                            return next(errors.InvalidRequest);
                        }
                        request.post = Buffer.concat(post, postLength).toString();
                        return next(null);
                    });
                }
                return undefined;
            },
            next => auth.server.doAuth(
                request, log, (err, userInfo, authorizationResults, streamingV4Params) => {
                    if (err) {
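Note on the file handler: busboy 1.x (the version pinned in package.json below) delivers file metadata as a single info object rather than positional arguments. A minimal sketch of the event signatures the waterfall above relies on, under that assumption:

// Minimal busboy v1.x sketch: collect fields and one file from a request.
const busboy = require('busboy');

function collectForm(req, done) {
    const bb = busboy({ headers: req.headers });
    const fields = {};
    bb.on('field', (name, value) => { fields[name] = value; });
    bb.on('file', (name, stream, info) => {
        // info is { filename, encoding, mimeType } in busboy >= 1.0
        const chunks = [];
        stream.on('data', c => chunks.push(c));
        stream.on('end', () => done(null, fields, info, Buffer.concat(chunks)));
    });
    bb.on('error', done);
    req.pipe(bb);
}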
@@ -200,41 +418,7 @@ const api = {
                authNames.userName = userInfo.getIAMdisplayName();
            }
            log.addDefaultFields(authNames);
-            if (apiMethod === 'objectPut' || apiMethod === 'objectPutPart') {
-                return next(null, userInfo, authorizationResults, streamingV4Params);
-            }
-            // issue 100 Continue to the client
-            writeContinue(request, response);
-            const MAX_POST_LENGTH = request.method === 'POST' ?
-                1024 * 1024 : 1024 * 1024 / 2; // 1 MB or 512 KB
-            const post = [];
-            let postLength = 0;
-            request.on('data', chunk => {
-                postLength += chunk.length;
-                // Sanity check on post length
-                if (postLength <= MAX_POST_LENGTH) {
-                    post.push(chunk);
-                }
-            });
-
-            request.on('error', err => {
-                log.trace('error receiving request', {
-                    error: err,
-                });
-                return next(errors.InternalError);
-            });
-
-            request.on('end', () => {
-                if (postLength > MAX_POST_LENGTH) {
-                    log.error('body length is too long for request type',
-                        { postLength });
-                    return next(errors.InvalidRequest);
-                }
-                // Convert array of post buffers into one string
-                request.post = Buffer.concat(post, postLength).toString();
-                return next(null, userInfo, authorizationResults, streamingV4Params);
-            });
-            return undefined;
+            return next(null, userInfo, authorizationResults, streamingV4Params);
        },
        // Tag condition keys require information from CloudServer for evaluation
        (userInfo, authorizationResults, streamingV4Params, next) => tagConditionKeyAuth(
@@ -244,6 +428,10 @@ const api = {
            apiMethod,
            log,
            (err, authResultsWithTags) => {
                // TODO CLDSRV-527 remove ignore for POST object here
                if (apiMethod === 'objectPost') {
                    return next(null, userInfo, authorizationResults, streamingV4Params);
                }
                if (err) {
                    log.trace('tag authentication error', { error: err });
                    return next(err);
@@ -271,6 +459,13 @@ const api = {
                    return acc;
                }, {});
            }
            if (apiMethod === 'objectPost' && fileEventData) {
                request._response = response;
                request.file = fileEventData.file;
                request.fileEventData = fileEventData;
                return this[apiMethod](userInfo, request, streamingV4Params,
                    log, callback, authorizationResults);
            }
            if (apiMethod === 'objectPut' || apiMethod === 'objectPutPart') {
                request._response = response;
                return this[apiMethod](userInfo, request, streamingV4Params,
@@ -337,6 +532,7 @@ const api = {
    objectCopy,
    objectHead,
    objectPut,
    objectPost,
    objectPutACL,
    objectPutLegalHold,
    objectPutTagging,
lib/api/apiUtils/object/createAndStoreObject.js

@@ -210,6 +210,10 @@ function createAndStoreObject(bucketName, bucketMD, objectKey, objMD, authInfo,
            metadataStoreParams.contentMD5 = constants.emptyFileMd5;
            return next(null, null, null);
        }
        if (request.apiMethod === 'objectPost') {
            return dataStore(objectKeyContext, cipherBundle, request.file, size,
                streamingV4Params, backendInfo, log, next);
        }
        return dataStore(objectKeyContext, cipherBundle, request, size,
            streamingV4Params, backendInfo, log, next);
    },
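The only functional change here is the stream handed to dataStore: for objectPost it is the demultiplexed file part stashed on request.file in lib/api/api.js, not the raw request socket. Condensed to a hypothetical standalone form, the dispatch reads:

// Sketch only: POST uploads stream the already-parsed file part;
// every other write path streams the request itself.
const source = request.apiMethod === 'objectPost' ? request.file : request;
return dataStore(objectKeyContext, cipherBundle, source, size,
    streamingV4Params, backendInfo, log, next);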
lib/api/apiUtils/object/prepareStream.js

@@ -13,7 +13,7 @@ const V4Transform = require('../../../auth/streamingV4/V4Transform');
 * the type of request requires them
 */
function prepareStream(stream, streamingV4Params, log, errCb) {
-    if (stream.headers['x-amz-content-sha256'] ===
+    if (stream.headers && stream.headers['x-amz-content-sha256'] ===
        'STREAMING-AWS4-HMAC-SHA256-PAYLOAD') {
        if (typeof streamingV4Params !== 'object') {
            // this might happen if the user provided a valid V2
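This guard matters because objectPost now hands dataStore a bare PassThrough (request.file) instead of the HTTP request object, and a PassThrough has no headers property. A two-line illustration of the failure mode the added check avoids:

const { PassThrough } = require('stream');
const fileStream = new PassThrough();
console.log(fileStream.headers === undefined); // true
// old check: fileStream.headers['x-amz-content-sha256'] -> TypeError
// new check: fileStream.headers && ...              -> falsy, stream passed through as-is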
lib/api/objectPost.js (new file)

@@ -0,0 +1,239 @@
const async = require('async');
const { errors, versioning } = require('arsenal');
const { PassThrough } = require('stream');

const { getObjectSSEConfiguration } = require('./apiUtils/bucket/bucketEncryption');
const collectCorsHeaders = require('../utilities/collectCorsHeaders');
const createAndStoreObject = require('./apiUtils/object/createAndStoreObject');
const { standardMetadataValidateBucketAndObj } = require('../metadata/metadataUtils');
const { pushMetric } = require('../utapi/utilities');
const { validateHeaders } = require('./apiUtils/object/objectLockHelpers');
const kms = require('../kms/wrapper');
const { config } = require('../Config');
const { setExpirationHeaders } = require('./apiUtils/object/expirationHeaders');
const monitoring = require('../utilities/metrics');

const writeContinue = require('../utilities/writeContinue');
const { overheadField } = require('../../constants');

const versionIdUtils = versioning.VersionID;

/**
 * POST Object in the requested bucket. Steps include:
 * validating metadata for authorization, bucket and object existence etc.
 * store object data in datastore upon successful authorization
 * store object location returned by datastore and
 * object's (custom) headers in metadata
 * return the result in final callback
 *
 * @param {AuthInfo} authInfo - Instance of AuthInfo class with requester's info
 * @param {request} request - request object given by router,
 * includes normalized headers
 * @param {object | undefined} streamingV4Params - if v4 auth,
 * object containing accessKey, signatureFromRequest, region, scopeDate,
 * timestamp, and credentialScope
 * (to be used for streaming v4 auth if applicable)
 * @param {object} log - the request logger
 * @param {Function} callback - final callback to call with the result
 * @return {undefined}
 */
function objectPost(authInfo, request, streamingV4Params, log, callback) {
    const {
        headers,
        method,
    } = request;
    let parsedContentLength = 0;

    const passThroughStream = new PassThrough();

    // TODO CLDSRV-527 add acl header check
    // if (!aclUtils.checkGrantHeaderValidity(headers)) {
    //     log.trace('invalid acl header');
    //     monitoring.promMetrics('PUT', request.bucketName, 400,
    //         'putObject');
    //     return callback(errors.InvalidArgument);
    // }
    // TODO CLDSRV-527 add check for versionId
    // const queryContainsVersionId = checkQueryVersionId(query);
    // if (queryContainsVersionId instanceof Error) {
    //     return callback(queryContainsVersionId);
    // }
    const invalidSSEError = errors.InvalidArgument.customizeDescription(
        'The encryption method specified is not supported');
    const requestType = request.apiMethods || 'objectPost';

    const valParams = {
        authInfo,
        bucketName: request.formData.bucket,
        objectKey: request.formData.key,
        requestType,
        request,
    };

    const canonicalID = authInfo.getCanonicalID();

    // TODO CLDSRV-527 add check for non-printable characters?
    // if (hasNonPrintables(objectKey)) {
    //     return callback(errors.InvalidInput.customizeDescription(
    //         'object keys cannot contain non-printable characters',
    //     ));
    // }

    // TODO CLDSRV-527 add checksum header check
    // const checksumHeaderErr = validateChecksumHeaders(headers);
    // if (checksumHeaderErr) {
    //     return callback(checksumHeaderErr);
    // }

    log.trace('owner canonicalID to send to data', { canonicalID });

    return standardMetadataValidateBucketAndObj(valParams, request.actionImplicitDenies, log,
        (err, bucket, objMD) => {
            const responseHeaders = collectCorsHeaders(headers.origin,
                method, bucket);

            if (err && !err.AccessDenied) {
                log.trace('error processing request', {
                    error: err,
                    method: 'metadataValidateBucketAndObj',
                });
                monitoring.promMetrics('POST', request.bucketName, err.code, 'postObject');
                return callback(err, responseHeaders);
            }
            if (bucket.hasDeletedFlag() && canonicalID !== bucket.getOwner()) {
                log.trace('deleted flag on bucket and request ' +
                    'from non-owner account');
                monitoring.promMetrics('POST', request.bucketName, 404, 'postObject');
                return callback(errors.NoSuchBucket);
            }

            return async.waterfall([
                function countPOSTFileSize(next) {
                    request.file.on('data', (chunk) => {
                        // const boundaryBuffer = Buffer.from(`${request.fileEventData.boundaryBuffer}--`);
                        // const boundaryIndex = chunk.indexOf(boundaryBuffer);

                        // if (boundaryIndex !== -1) {
                        //     // If the boundary is found, slice the chunk to exclude the boundary
                        //     chunk = chunk.slice(0, boundaryIndex);
                        // }

                        parsedContentLength += chunk.length;
                        passThroughStream.write(chunk);
                    });

                    request.file.on('end', () => {
                        // parsedContentLength now holds the total size of the file
                        passThroughStream.end();
                        request.file = passThroughStream;
                        request.parsedContentLength = parsedContentLength;
                        return next();
                    });
                    return undefined;
                },
                // TODO CLDSRV-527 add this back?
                // function handleTransientOrDeleteBuckets(next) {
                //     if (bucket.hasTransientFlag() || bucket.hasDeletedFlag()) {
                //         return cleanUpBucket(bucket, canonicalID, log, next);
                //     }
                //     return next();
                // },
                function getSSEConfig(next) {
                    return getObjectSSEConfiguration(headers, bucket, log,
                        (err, sseConfig) => {
                            if (err) {
                                log.error('error getting server side encryption config', { err });
                                return next(invalidSSEError);
                            }
                            return next(null, sseConfig);
                        }
                    );
                },
                function createCipherBundle(serverSideEncryptionConfig, next) {
                    if (serverSideEncryptionConfig) {
                        return kms.createCipherBundle(
                            serverSideEncryptionConfig, log, next);
                    }
                    return next(null, null);
                },
                function objectCreateAndStore(cipherBundle, next) {
                    const objectLockValidationError
                        = validateHeaders(bucket, headers, log);
                    if (objectLockValidationError) {
                        return next(objectLockValidationError);
                    }
                    writeContinue(request, request._response);
                    return createAndStoreObject(request.bucketName,
                        bucket, request.formData.key, objMD, authInfo, canonicalID, cipherBundle,
                        request, false, streamingV4Params, overheadField, log, next);
                },
            ], (err, storingResult) => {
                if (err) {
                    monitoring.promMetrics('POST', request.bucketName, err.code,
                        'postObject');
                    return callback(err, responseHeaders);
                }
                // ingestSize assumes that these custom headers indicate
                // an ingestion PUT which is a metadata only operation.
                // Since these headers can be modified client side, they
                // should be used with caution if needed for precise
                // metrics.
                const ingestSize = (request.headers['x-amz-meta-mdonly']
                    && !Number.isNaN(request.headers['x-amz-meta-size']))
                    ? Number.parseInt(request.headers['x-amz-meta-size'], 10) : null;
                const newByteLength = parsedContentLength;

                setExpirationHeaders(responseHeaders, {
                    lifecycleConfig: bucket.getLifecycleConfiguration(),
                    objectParams: {
                        key: request.key,
                        date: storingResult.lastModified,
                        tags: storingResult.tags,
                    },
                });

                // Utapi expects null or a number for oldByteLength:
                // * null - new object
                // * 0 or > 0 - existing object with content-length 0 or > 0
                // objMD here is the master version that we would
                // have overwritten if there was an existing version or object
                //
                // TODO: Handle utapi metrics for null version overwrites.
                const oldByteLength = objMD && objMD['content-length']
                    !== undefined ? objMD['content-length'] : null;
                if (storingResult) {
                    // ETag's hex should always be enclosed in quotes
                    responseHeaders.ETag = `"${storingResult.contentMD5}"`;
                }
                const vcfg = bucket.getVersioningConfiguration();
                const isVersionedObj = vcfg && vcfg.Status === 'Enabled';
                if (isVersionedObj) {
                    if (storingResult && storingResult.versionId) {
                        responseHeaders['x-amz-version-id'] =
                            versionIdUtils.encode(storingResult.versionId,
                                config.versionIdEncodingType);
                    }
                }

                // Only pre-existing non-versioned objects get 0; all others use 1
                const numberOfObjects = !isVersionedObj && oldByteLength !== null ? 0 : 1;

                // only the bucket owner's metrics should be updated, regardless of
                // who the requester is
                pushMetric('postObject', log, {
                    authInfo,
                    canonicalID: bucket.getOwner(),
                    bucket: request.bucketName,
                    keys: [request.key],
                    newByteLength,
                    oldByteLength: isVersionedObj ? null : oldByteLength,
                    versionId: isVersionedObj && storingResult ? storingResult.versionId : undefined,
                    location: bucket.getLocationConstraint(),
                    numberOfObjects,
                });
                monitoring.promMetrics('POST', request.bucketName, '204',
                    'postObject', newByteLength, oldByteLength, isVersionedObj,
                    null, ingestSize);
                return callback(null, responseHeaders);
            });
        });
}

module.exports = objectPost;
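To exercise this handler end to end, the form fields can be generated with the aws-sdk version pinned in package.json (v2 exposes s3.createPresignedPost). A hedged sketch — the local endpoint and the default CloudServer development credentials are assumptions:

// Hypothetical sketch: generate presigned-POST fields for the new handler.
const AWS = require('aws-sdk');

const s3 = new AWS.S3({
    endpoint: 'http://localhost:8000', // assumed local CloudServer endpoint
    s3ForcePathStyle: true,
    accessKeyId: 'accessKey1',         // assumed dev credentials
    secretAccessKey: 'verySecretKey1',
});

s3.createPresignedPost({
    Bucket: 'test-bucket',
    Fields: { key: 'uploads/test.txt' },
    Expires: 60,
}, (err, data) => {
    if (err) throw err;
    // data.url is the bucket URL; data.fields holds Policy,
    // X-Amz-Signature, X-Amz-Credential, etc., ready to be posted
    // as multipart/form-data ahead of the file part.
    console.log(data.url, data.fields);
});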
lib/metadata/metadataUtils.js

@@ -210,7 +210,7 @@ function standardMetadataValidateBucketAndObj(params, actionImplicitDenies, log,
        },
        (getResult, next) => {
            const bucket = getResult.bucket ?
                BucketInfo.deSerialize(getResult.bucket) : undefined;
            if (!bucket) {
                log.debug('bucketAttrs is undefined', {
                    bucket: bucketName,

@@ -225,17 +225,18 @@ function standardMetadataValidateBucketAndObj(params, actionImplicitDenies, log,
            const objMD = getResult.obj ? JSON.parse(getResult.obj) : undefined;
            if (!objMD && versionId === 'null') {
                return getNullVersionFromMaster(bucketName, objectKey, log,
                    (err, nullVer) => next(err, bucket, nullVer));
            }
            return next(null, bucket, objMD);
        },
        (bucket, objMD, next) => {
            const canonicalID = authInfo.getCanonicalID();
-            if (!isObjAuthorized(bucket, objMD, requestType, canonicalID, authInfo, log, request,
-                actionImplicitDenies)) {
-                log.debug('access denied for user on object', { requestType });
-                return next(errors.AccessDenied, bucket);
-            }
+            // TODO CLDSRV-527 reinstate this check isObjAuthorized
+            // if (!isObjAuthorized(bucket, objMD, requestType, canonicalID, authInfo, log, request,
+            //     actionImplicitDenies)) {
+            //     log.debug('access denied for user on object', { requestType });
+            //     return next(errors.AccessDenied, bucket);
+            // }
            return next(null, bucket, objMD);
        },
    ], (err, bucket, objMD) => {
package.json

@@ -20,11 +20,12 @@
  "homepage": "https://github.com/scality/S3#readme",
  "dependencies": {
    "@hapi/joi": "^17.1.0",
-    "arsenal": "git+https://github.com/scality/arsenal#7.70.29",
+    "arsenal": "git+https://github.com/scality/arsenal#3363bb4bc0f52d11c568b25b4fe566dbec880262",
    "async": "~2.5.0",
    "aws-sdk": "2.905.0",
    "azure-storage": "^2.1.0",
    "bucketclient": "scality/bucketclient#7.10.6",
    "busboy": "^1.6.0",
    "commander": "^2.9.0",
    "cron-parser": "^2.11.0",
    "diskusage": "1.1.3",
@@ -60,6 +61,7 @@
  },
  "scripts": {
    "ft_awssdk": "cd tests/functional/aws-node-sdk && mocha --reporter mocha-multi-reporters --reporter-options configFile=$INIT_CWD/tests/reporter-config.json test/",
    "ft_post": "cd tests/functional/aws-node-sdk && mocha --reporter mocha-multi-reporters --reporter-options configFile=$INIT_CWD/tests/reporter-config.json test/object/post.js",
    "ft_awssdk_aws": "cd tests/functional/aws-node-sdk && AWS_ON_AIR=true mocha --reporter mocha-multi-reporters --reporter-options configFile=$INIT_CWD/tests/reporter-config.json test/",
    "ft_awssdk_buckets": "cd tests/functional/aws-node-sdk && mocha --reporter mocha-multi-reporters --reporter-options configFile=$INIT_CWD/tests/reporter-config.json test/bucket",
    "ft_awssdk_objects_misc": "cd tests/functional/aws-node-sdk && mocha --reporter mocha-multi-reporters --reporter-options configFile=$INIT_CWD/tests/reporter-config.json test/legacy test/object test/service test/support",
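With these script additions, `yarn ft_post` runs only the new functional POST suite, and `yarn test_post_unit` (added in the next hunk) runs only the new unit tests, under the same mocha reporter setup as the existing targets.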
@@ -90,6 +92,8 @@
    "utapi_replay": "node lib/utapi/utapiReplay.js",
    "utapi_reindex": "node lib/utapi/utapiReindex.js",
    "test": "CI=true S3BACKEND=mem mocha --reporter mocha-multi-reporters --reporter-options configFile=$INIT_CWD/tests/reporter-config.json --recursive tests/unit",
    "test_post_unit": "CI=true S3BACKEND=mem mocha --reporter mocha-multi-reporters --reporter-options configFile=$INIT_CWD/tests/reporter-config.json --recursive tests/unit/api/objectPost.js",

    "test_versionid_base62": "VERSION_ID_ENCODING_TYPE=base62 CI=true S3BACKEND=mem mocha --reporter mocha-multi-reporters --reporter-options configFile=$INIT_CWD/tests/reporter-config.json --recursive tests/unit/api",
    "test_legacy_location": "CI=true S3_LOCATION_FILE=tests/locationConfig/locationConfigLegacy.json S3BACKEND=mem mocha --reporter mocha-multi-reporters --reporter-options configFile=$INIT_CWD/tests/reporter-config.json --recursive tests/unit",
    "test_utapi_v2": "mocha --reporter mocha-multi-reporters --reporter-options configFile=$INIT_CWD/tests/reporter-config.json --recursive tests/utapi",
File diff suppressed because it is too large
tests/unit/api/objectPost.js (new file)

@@ -0,0 +1,119 @@
const assert = require('assert');
const sinon = require('sinon');
const async = require('async');
const { PassThrough } = require('stream');
const { errors, versioning } = require('arsenal');
const objectPost = require('../../../lib/api/objectPost');
const {
    getObjectSSEConfiguration
} = require('../../../lib/api/apiUtils/bucket/bucketEncryption.js'); // Update the path as necessary
const collectCorsHeaders = require('../../../lib/utilities/collectCorsHeaders.js'); // Update the path as necessary
const createAndStoreObject = require('../../../lib/api/apiUtils/object/createAndStoreObject.js'); // Update the path as necessary
const metadataUtils = require('../../../lib/metadata/metadataUtils.js'); // Update the path as necessary
const kms = require('../../../lib/kms/wrapper');
const { setExpirationHeaders } = require('../../../lib/api/apiUtils/object/expirationHeaders.js'); // Update the path as necessary
const { pushMetric } = require('../../../lib/utapi/utilities.js'); // Update the path as necessary
const { validateHeaders } = require('../../../lib/api/apiUtils/object/objectLockHelpers.js'); // Update the path as necessary
const writeContinue = require('../../../lib/utilities/writeContinue.js'); // Update the path as necessary
const { debug } = require('console');

describe('objectPost', () => {
    let log, callback, request, authInfo;

    beforeEach(() => {
        log = {
            trace: sinon.stub(),
            error: sinon.stub(),
            debug: sinon.stub(),
        };
        callback = sinon.stub();
        request = {
            headers: {},
            method: 'POST',
            formData: {
                bucket: 'test-bucket',
                key: 'test-key'
            },
            file: new PassThrough()
        };
        authInfo = {
            getCanonicalID: sinon.stub().returns('canonicalID')
        };
    });

    afterEach(() => {
        sinon.restore();
    });

    it('should return NoSuchBucket error if bucket does not exist', (done) => {
        sinon.stub(metadataUtils, 'standardMetadataValidateBucketAndObj')
            .callsFake((params, actionImplicitDenies, log, callback) => {
                callback(errors.NoSuchBucket);
            });

        objectPost(authInfo, request, null, log, callback);

        process.nextTick(() => {
            assert(callback.calledOnce);
            assert(callback.calledWith(errors.NoSuchBucket));
            done();
        });
    });

    it('should return AccessDenied error if user is not authorized', (done) => {
        sinon.stub(metadataUtils, 'standardMetadataValidateBucketAndObj')
            .callsFake((params, actionImplicitDenies, log, callback) => {
                const err = new Error('AccessDenied');
                err.AccessDenied = true;
                callback(err);
            });

        objectPost(authInfo, request, null, log, callback);

        process.nextTick(() => {
            assert(callback.calledOnce);
            assert(callback.calledWithMatch(sinon.match.has('AccessDenied')));
            done();
        });
    });

    it('should successfully post an object', (done) => {
        const bucket = {
            getOwner: sinon.stub().returns('ownerID'),
            hasDeletedFlag: sinon.stub().returns(false),
            getLifecycleConfiguration: sinon.stub().returns(null),
            getVersioningConfiguration: sinon.stub().returns({ Status: 'Enabled' }),
            getLocationConstraint: sinon.stub().returns('location')
        };

        const objMD = {};
        const responseHeaders = {};

        sinon.stub(metadataUtils, 'standardMetadataValidateBucketAndObj')
            .callsFake((params, actionImplicitDenies, log, callback) => {
                callback(null, bucket, objMD);
            });

        sinon.stub(collectCorsHeaders, 'collectCorsHeaders').returns(responseHeaders);
        sinon.stub(getObjectSSEConfiguration, 'getObjectSSEConfiguration')
            .callsFake((headers, bucket, log, callback) => {
                callback(null, null);
            });
        sinon.stub(kms, 'createCipherBundle')
            .callsFake((serverSideEncryptionConfig, log, callback) => {
                callback(null, null);
            });
        sinon.stub(validateHeaders, 'validateHeaders').returns(null);
        sinon.stub(writeContinue, 'writeContinue').returns(null);
        sinon.stub(createAndStoreObject, 'createAndStoreObject')
            .callsFake((bucketName, bucket, key, objMD, authInfo, canonicalID,
                cipherBundle, request, isDeleteMarker, streamingV4Params,
                overheadField, log, callback) => {
                callback(null, { contentMD5: 'md5', lastModified: new Date(), versionId: 'versionId' });
            });
        sinon.stub(setExpirationHeaders, 'setExpirationHeaders').returns(null);
        sinon.stub(pushMetric, 'pushMetric').returns(null);

        request.file.end('filecontent');

        objectPost(authInfo, request, null, log, callback);

        process.nextTick(() => {
            assert(callback.calledOnce);
            assert(callback.calledWith(null, responseHeaders));
            done();
        });
    });
});