Compare commits

...

15 Commits

Author SHA1 Message Date
Will Toozs 824b593a19 fixup: arsenal package 2024-07-05 14:45:07 +02:00
Will Toozs df8edfdec0 fixup: api handler fix 2024-07-05 14:45:07 +02:00
Will Toozs 413d0e374b lint 2024-07-05 14:45:07 +02:00
Will Toozs a4b10edad0 fixup: func tests: auth functional tests 2024-07-05 14:45:06 +02:00
Will Toozs 9f8df2f24b fixup: api: add fileevent check for no file data 2024-07-05 14:45:06 +02:00
Will Toozs 9534fe4cc6 fixup: api: remove policy decrypt 2024-07-05 14:45:06 +02:00
Will Toozs 471902ebde CLDSRV-551: functional tests 2024-07-05 14:45:05 +02:00
Will Toozs 456d5b79e3 fixup: api handler 2024-07-05 14:45:05 +02:00
Will Toozs f10b5541fd LDSRV-551: add auth field checking in api handler 2024-07-05 14:45:04 +02:00
Will Toozs d18f9ecd70 CLDSRV-546: unit tests 2024-07-04 16:08:58 +02:00
Will Toozs c4860c3817 CLDSRV-546: functional tests 2024-07-04 16:08:58 +02:00
Will Toozs 19f671bbf9 CLDSRV-546: debugged prepareStream 2024-07-04 16:08:58 +02:00
Will Toozs 20a6daa395 CLDSRV-546: post object action 2024-07-04 16:08:58 +02:00
Will Toozs 4d826c9f36 CLDSRV-546: add form data handling to api handler 2024-07-04 16:08:57 +02:00
Will Toozs 8f64c9c227 CLDSRV-546: update package for postObject support 2024-07-02 11:36:58 +02:00
9 changed files with 1454 additions and 43 deletions
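
Together these commits add browser-style POST object uploads (HTML form with a signed POST policy) to CloudServer. Purely as illustration, and not part of the diff, here is a minimal client-side sketch of the flow this change enables, mirroring the form-data/axios pattern used in the functional tests further down; the endpoint, bucket name and credentials are assumptions:

// Hypothetical usage sketch: upload a file with a signed POST policy.
const crypto = require('crypto');
const fs = require('fs');
const axios = require('axios');
const FormData = require('form-data');

const endpoint = 'http://localhost:8000'; // assumption: CloudServer running locally
const bucket = 'my-bucket';               // assumption: existing bucket
const key = 'hello.txt';
const accessKey = 'accessKey1';           // assumption: default test credentials
const secretKey = 'verySecretKey1';       // assumption

const now = new Date();
const amzDate = now.toISOString().replace(/[:-]|\.\d{3}/g, ''); // e.g. 20240705T124500Z
const shortDate = amzDate.slice(0, 8);
const credential = `${accessKey}/${shortDate}/us-east-1/s3/aws4_request`;

const policy = Buffer.from(JSON.stringify({
    expiration: new Date(now.getTime() + 15 * 60 * 1000).toISOString(),
    conditions: [
        { bucket },
        { key },
        { 'x-amz-credential': credential },
        { 'x-amz-algorithm': 'AWS4-HMAC-SHA256' },
        { 'x-amz-date': amzDate },
    ],
})).toString('base64');

// Standard SigV4 signing-key derivation, then sign the base64-encoded policy.
const hmac = (k, v) => crypto.createHmac('sha256', k).update(v).digest();
const signingKey = hmac(hmac(hmac(hmac(`AWS4${secretKey}`, shortDate), 'us-east-1'), 's3'), 'aws4_request');
const signature = crypto.createHmac('sha256', signingKey).update(policy).digest('hex');

const form = new FormData();
form.append('bucket', bucket);
form.append('key', key);
form.append('Policy', policy);
form.append('X-Amz-Algorithm', 'AWS4-HMAC-SHA256');
form.append('X-Amz-Credential', credential);
form.append('X-Amz-Date', amzDate);
form.append('X-Amz-Signature', signature);
form.append('file', fs.createReadStream('./hello.txt')); // the file field must come last

form.getLength((err, length) => {
    if (err) throw err;
    axios.post(`${endpoint}/${bucket}`, form, {
        headers: { ...form.getHeaders(), 'Content-Length': length },
    }).then(res => console.log(res.status)); // 204 on success
});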

@@ -1,5 +1,6 @@
const { auth, errors, policies } = require('arsenal');
const async = require('async');
const busboy = require('@fastify/busboy');
const bucketDelete = require('./bucketDelete');
const bucketDeleteCors = require('./bucketDeleteCors');
@@ -52,6 +53,7 @@ const objectGetRetention = require('./objectGetRetention');
const objectGetTagging = require('./objectGetTagging');
const objectHead = require('./objectHead');
const objectPut = require('./objectPut');
const objectPost = require('./objectPost');
const objectPutACL = require('./objectPutACL');
const objectPutLegalHold = require('./objectPutLegalHold');
const objectPutTagging = require('./objectPutTagging');
@@ -112,7 +114,7 @@ const api = {
// no need to check auth on website or cors preflight requests
if (apiMethod === 'websiteGet' || apiMethod === 'websiteHead' ||
apiMethod === 'corsPreflight') {
request.actionImplicitDenies = false;
return this[apiMethod](request, log, callback);
}
@@ -158,7 +160,7 @@ const api = {
// second item checks s3:GetObject(Version)Tagging action
if (!authResults[1].isAllowed) {
log.trace('get tagging authorization denial ' +
'from Vault');
returnTagCount = false;
}
} else {
@@ -184,8 +186,114 @@
}
return { returnTagCount, isImplicitDeny };
}
let bb;
let fileEventData = null;
if (apiMethod === 'objectPost') {
bb = busboy({ headers: request.headers });
}
return async.waterfall([
next => {
if (apiMethod === 'objectPut' || apiMethod === 'objectPutPart') {
return next(null);
}
if (apiMethod === 'objectPost' && request.headers['content-type'].includes('multipart/form-data')) {
writeContinue(request, response);
let algoOK = false;
let credOK = false;
let dateOK = false;
let sigOK = false;
let policyOK = false;
request.formData = {};
let totalFieldSize = 0;
const MAX_FIELD_SIZE = 20 * 1024; // 20KB
bb.on('field', (fieldname, val) => {
totalFieldSize += Buffer.byteLength(val, 'utf8');
if (totalFieldSize > MAX_FIELD_SIZE) {
const err = errors.MaxPostPreDataLengthExceeded;
bb.emit('error', err);
}
// Convert fieldname to lowercase for case-insensitive comparison
const lowerFieldname = fieldname.toLowerCase();
request.formData[lowerFieldname] = val;
if (lowerFieldname === 'x-amz-algorithm') {
algoOK = true;
}
if (lowerFieldname === 'x-amz-credential') {
credOK = true;
}
if (lowerFieldname === 'x-amz-date') {
dateOK = true;
}
if (lowerFieldname === 'x-amz-signature') {
sigOK = true;
}
if (lowerFieldname === 'policy') {
policyOK = true;
}
});
bb.on('file', (fieldname, file, filename, encoding, mimetype) => {
fileEventData = { fieldname, file, filename, encoding, mimetype };
if (algoOK && credOK && dateOK && sigOK && policyOK) {
return next(null);
}
return undefined;
});
bb.on('finish', () => {
// if fields are found but no file, continue
if ((algoOK && credOK && dateOK && sigOK && policyOK) && !fileEventData) {
return next(null);
}
return undefined;
});
bb.on('error', (err) => {
log.trace('Error processing form data:', {
error: err,
});
request.unpipe(bb);
return next(err);
});
request.pipe(bb);
} else {
// issue 100 Continue to the client
writeContinue(request, response);
const MAX_POST_LENGTH = request.method === 'POST' ?
1024 * 1024 : 1024 * 1024 / 2; // 1 MB or 512 KB
const post = [];
let postLength = 0;
request.on('data', chunk => {
postLength += chunk.length;
// Sanity check on post length
if (postLength <= MAX_POST_LENGTH) {
post.push(chunk);
}
});
request.on('error', err => {
log.trace('error receiving request', {
error: err,
});
return next(errors.InternalError);
});
request.on('end', () => {
if (postLength > MAX_POST_LENGTH) {
log.error('body length is too long for request type',
{ postLength });
return next(errors.InvalidRequest);
}
request.post = Buffer.concat(post, postLength).toString();
return next(null);
});
}
return undefined;
},
next => auth.server.doAuth(
request, log, (err, userInfo, authorizationResults, streamingV4Params) => {
if (err) {
@@ -200,41 +308,7 @@ const api = {
authNames.userName = userInfo.getIAMdisplayName();
}
log.addDefaultFields(authNames);
if (apiMethod === 'objectPut' || apiMethod === 'objectPutPart') {
return next(null, userInfo, authorizationResults, streamingV4Params);
return next(null, userInfo, authorizationResults, streamingV4Params);
}
// issue 100 Continue to the client
writeContinue(request, response);
const MAX_POST_LENGTH = request.method === 'POST' ?
1024 * 1024 : 1024 * 1024 / 2; // 1 MB or 512 KB
const post = [];
let postLength = 0;
request.on('data', chunk => {
postLength += chunk.length;
// Sanity check on post length
if (postLength <= MAX_POST_LENGTH) {
post.push(chunk);
}
});
request.on('error', err => {
log.trace('error receiving request', {
error: err,
});
return next(errors.InternalError);
});
request.on('end', () => {
if (postLength > MAX_POST_LENGTH) {
log.error('body length is too long for request type',
{ postLength });
return next(errors.InvalidRequest);
}
// Convert array of post buffers into one string
request.post = Buffer.concat(post, postLength).toString();
return next(null, userInfo, authorizationResults, streamingV4Params);
});
return undefined;
},
// Tag condition keys require information from CloudServer for evaluation
(userInfo, authorizationResults, streamingV4Params, next) => tagConditionKeyAuth(
@@ -271,6 +345,15 @@
return acc;
}, {});
}
if (apiMethod === 'objectPost') {
request._response = response;
if (fileEventData) {
request.fileEventData = fileEventData;
request.headers['content-type'] = fileEventData.mimetype;
}
return this[apiMethod](userInfo, request, streamingV4Params,
log, callback, authorizationResults);
}
if (apiMethod === 'objectPut' || apiMethod === 'objectPutPart') {
request._response = response;
return this[apiMethod](userInfo, request, streamingV4Params,
@@ -337,6 +420,7 @@ const api = {
objectCopy,
objectHead,
objectPut,
objectPost,
objectPutACL,
objectPutLegalHold,
objectPutTagging,

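Aside (illustrative only, not part of the diff): the handler above drives everything from @fastify/busboy events, with 'field' events carrying the signed policy form fields and a single 'file' event carrying the object data stream. A minimal standalone sketch of that parsing pattern, assuming a bare Node HTTP server rather than CloudServer's router:

// Illustrative sketch of multipart/form-data parsing with @fastify/busboy.
const http = require('http');
const busboy = require('@fastify/busboy');

http.createServer((req, res) => {
    if (!(req.headers['content-type'] || '').includes('multipart/form-data')) {
        res.writeHead(400);
        return res.end();
    }
    const bb = busboy({ headers: req.headers });
    const fields = {};
    bb.on('field', (name, value) => {
        fields[name.toLowerCase()] = value; // case-insensitive, like the handler above
    });
    bb.on('file', (name, fileStream) => {
        // In CloudServer this stream is stashed on the request (fileEventData)
        // and later handed to the data store instead of the request itself.
        fileStream.resume(); // drain it in this sketch
    });
    bb.on('finish', () => {
        res.writeHead(200, { 'content-type': 'application/json' });
        res.end(JSON.stringify(fields));
    });
    return req.pipe(bb);
}).listen(8080);
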
@@ -210,8 +210,18 @@ function createAndStoreObject(bucketName, bucketMD, objectKey, objMD, authInfo,
metadataStoreParams.contentMD5 = constants.emptyFileMd5;
return next(null, null, null);
}
return dataStore(objectKeyContext, cipherBundle, request, size,
streamingV4Params, backendInfo, log, next);
// Object Post receives a file stream.
// This is to be used to store data instead of the request stream itself.
let stream;
if (request.apiMethod === 'objectPost') {
stream = request.fileEventData ? request.fileEventData.file : undefined;
} else {
stream = request;
}
return dataStore(objectKeyContext, cipherBundle, stream, size, streamingV4Params, backendInfo, log, next);
},
function processDataResult(dataGetInfo, calculatedHash, next) {
if (dataGetInfo === null || dataGetInfo === undefined) {

@@ -13,7 +13,7 @@ const V4Transform = require('../../../auth/streamingV4/V4Transform');
* the type of request requires them
*/
function prepareStream(stream, streamingV4Params, log, errCb) {
if (stream.headers['x-amz-content-sha256'] ===
if (stream && stream.headers && stream.headers['x-amz-content-sha256'] ===
'STREAMING-AWS4-HMAC-SHA256-PAYLOAD') {
if (typeof streamingV4Params !== 'object') {
// this might happen if the user provided a valid V2
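
Aside (illustrative only, not part of the diff): the added guard matters because, on the objectPost path, the stream handed to the data layer is the multipart file stream (later a PassThrough) rather than the HTTP request, so it has no headers property. A tiny sketch of the check:

// Sketch: the POST file stream is a plain Readable/PassThrough without `headers`,
// so prepareStream must not assume `stream.headers` exists.
const { PassThrough } = require('stream');

const isV4Streaming = stream =>
    Boolean(stream && stream.headers &&
        stream.headers['x-amz-content-sha256'] === 'STREAMING-AWS4-HMAC-SHA256-PAYLOAD');

console.log(isV4Streaming(new PassThrough())); // false — no TypeError thrown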

lib/api/objectPost.js (new file, 135 lines)

@@ -0,0 +1,135 @@
const async = require('async');
const { errors, versioning } = require('arsenal');
const { PassThrough } = require('stream');
const collectCorsHeaders = require('../utilities/collectCorsHeaders');
const createAndStoreObject = require('./apiUtils/object/createAndStoreObject');
const { standardMetadataValidateBucketAndObj } = require('../metadata/metadataUtils');
const { config } = require('../Config');
const { setExpirationHeaders } = require('./apiUtils/object/expirationHeaders');
const writeContinue = require('../utilities/writeContinue');
const { overheadField } = require('../../constants');
const versionIdUtils = versioning.VersionID;
/**
* POST Object in the requested bucket. Steps include:
* validating metadata for authorization, bucket and object existence etc.
* store object data in datastore upon successful authorization
* store object location returned by datastore and
* object's (custom) headers in metadata
* return the result in final callback
*
* @param {AuthInfo} authInfo - Instance of AuthInfo class with requester's info
* @param {request} request - request object given by router,
* includes normalized headers
* @param {object | undefined } streamingV4Params - if v4 auth,
* object containing accessKey, signatureFromRequest, region, scopeDate,
* timestamp, and credentialScope
* (to be used for streaming v4 auth if applicable)
* @param {object} log - the log request
* @param {Function} callback - final callback to call with the result
* @return {undefined}
*/
function objectPost(authInfo, request, streamingV4Params, log, callback) {
const {
headers,
method,
} = request;
let parsedContentLength = 0;
const passThroughStream = new PassThrough();
const requestType = request.apiMethods || 'objectPost';
const valParams = {
authInfo,
bucketName: request.formData.bucket,
objectKey: request.formData.key,
requestType,
request,
};
const canonicalID = authInfo.getCanonicalID();
log.trace('owner canonicalID to send to data', { canonicalID });
return standardMetadataValidateBucketAndObj(valParams, request.actionImplicitDenies, log,
(err, bucket, objMD) => {
const responseHeaders = collectCorsHeaders(headers.origin,
method, bucket);
if (err && !err.AccessDenied) {
log.trace('error processing request', {
error: err,
method: 'metadataValidateBucketAndObj',
});
return callback(err, responseHeaders);
}
if (bucket.hasDeletedFlag() && canonicalID !== bucket.getOwner()) {
log.trace('deleted flag on bucket and request ' +
'from non-owner account');
return callback(errors.NoSuchBucket);
}
return async.waterfall([
function countPOSTFileSize(next) {
if (!request.fileEventData || !request.fileEventData.file) {
return next();
}
request.fileEventData.file.on('data', (chunk) => {
parsedContentLength += chunk.length;
passThroughStream.write(chunk);
});
request.fileEventData.file.on('end', () => {
passThroughStream.end();
// Setting the file in the request avoids the need to make changes to createAndStoreObject's
// parameters and thus all its subsequent calls. This is necessary as the stream used to create
// the object is that of the request directly; something we must work around
// to use the file data produced from the multipart form data.
/* eslint-disable no-param-reassign */
request.fileEventData.file = passThroughStream;
/* eslint-enable no-param-reassign */
// Here parsedContentLength will have the total size of the file
// This is used when calculating the size of the object in createAndStoreObject
request.parsedContentLength = parsedContentLength;
return next();
});
return undefined;
},
function objectCreateAndStore(next) {
writeContinue(request, request._response);
return createAndStoreObject(request.bucketName,
bucket, request.formData.key, objMD, authInfo, canonicalID, null,
request, false, streamingV4Params, overheadField, log, next);
},
], (err, storingResult) => {
if (err) {
return callback(err, responseHeaders);
}
setExpirationHeaders(responseHeaders, {
lifecycleConfig: bucket.getLifecycleConfiguration(),
objectParams: {
key: request.key,
date: storingResult.lastModified,
tags: storingResult.tags,
},
});
if (storingResult) {
// ETag's hex should always be enclosed in quotes
responseHeaders.ETag = `"${storingResult.contentMD5}"`;
}
const vcfg = bucket.getVersioningConfiguration();
const isVersionedObj = vcfg && vcfg.Status === 'Enabled';
if (isVersionedObj) {
if (storingResult && storingResult.versionId) {
responseHeaders['x-amz-version-id'] =
versionIdUtils.encode(storingResult.versionId,
config.versionIdEncodingType);
}
}
return callback(null, responseHeaders);
});
});
}
module.exports = objectPost;

@@ -19,8 +19,9 @@
},
"homepage": "https://github.com/scality/S3#readme",
"dependencies": {
"@fastify/busboy": "^2.1.1",
"@hapi/joi": "^17.1.0",
"arsenal": "git+https://github.com/scality/arsenal#7.70.29",
"arsenal": "git+https://github.com/scality/arsenal#b00aea282244cd90efc745941e03d0be7e734fc7",
"async": "~2.5.0",
"aws-sdk": "2.905.0",
"azure-storage": "^2.1.0",
@@ -60,6 +61,8 @@
},
"scripts": {
"ft_awssdk": "cd tests/functional/aws-node-sdk && mocha --reporter mocha-multi-reporters --reporter-options configFile=$INIT_CWD/tests/reporter-config.json test/",
"ft_post": "cd tests/functional/aws-node-sdk && mocha --reporter mocha-multi-reporters --reporter-options configFile=$INIT_CWD/tests/reporter-config.json test/object/post.js",
"ft_post_aws": "cd tests/functional/aws-node-sdk && mocha --reporter mocha-multi-reporters --reporter-options configFile=$INIT_CWD/tests/reporter-config.json test/object/post-copy.js",
"ft_awssdk_aws": "cd tests/functional/aws-node-sdk && AWS_ON_AIR=true mocha --reporter mocha-multi-reporters --reporter-options configFile=$INIT_CWD/tests/reporter-config.json test/", "ft_awssdk_aws": "cd tests/functional/aws-node-sdk && AWS_ON_AIR=true mocha --reporter mocha-multi-reporters --reporter-options configFile=$INIT_CWD/tests/reporter-config.json test/",
"ft_awssdk_buckets": "cd tests/functional/aws-node-sdk && mocha --reporter mocha-multi-reporters --reporter-options configFile=$INIT_CWD/tests/reporter-config.json test/bucket", "ft_awssdk_buckets": "cd tests/functional/aws-node-sdk && mocha --reporter mocha-multi-reporters --reporter-options configFile=$INIT_CWD/tests/reporter-config.json test/bucket",
"ft_awssdk_objects_misc": "cd tests/functional/aws-node-sdk && mocha --reporter mocha-multi-reporters --reporter-options configFile=$INIT_CWD/tests/reporter-config.json test/legacy test/object test/service test/support", "ft_awssdk_objects_misc": "cd tests/functional/aws-node-sdk && mocha --reporter mocha-multi-reporters --reporter-options configFile=$INIT_CWD/tests/reporter-config.json test/legacy test/object test/service test/support",

@@ -0,0 +1,722 @@
const xml2js = require('xml2js');
const axios = require('axios');
const fs = require('fs');
const path = require('path');
const crypto = require('crypto');
const FormData = require('form-data');
const assert = require('assert');
const BucketUtility = require('../../lib/utility/bucket-util');
const getConfig = require('../support/config');
const filename = 'test-file.txt';
const region = 'us-east-1';
let ak;
let sk;
let s3;
const generateBucketName = () => `test-bucket-${crypto.randomBytes(8).toString('hex')}`;
const formatDate = (date) => {
const year = date.getUTCFullYear();
const month = (date.getUTCMonth() + 1).toString().padStart(2, '0');
const day = date.getUTCDate().toString().padStart(2, '0');
return `${year}${month}${day}`;
};
const getSignatureKey = (key, dateStamp, regionName, serviceName) => {
const kDate = crypto.createHmac('sha256', `AWS4${key}`).update(dateStamp).digest();
const kRegion = crypto.createHmac('sha256', kDate).update(regionName).digest();
const kService = crypto.createHmac('sha256', kRegion).update(serviceName).digest();
const kSigning = crypto.createHmac('sha256', kService).update('aws4_request').digest();
return kSigning;
};
const calculateFields = (ak, sk, bucketName, additionalConditions) => {
const service = 's3';
const now = new Date();
const formattedDate = now.toISOString().replace(/[:-]|\.\d{3}/g, '');
const shortFormattedDate = formatDate(now);
const credential = `${ak}/${shortFormattedDate}/${region}/${service}/aws4_request`;
const conditionsFields = [
{ bucket: bucketName },
{ key: filename },
{ 'x-amz-credential': credential },
{ 'x-amz-algorithm': 'AWS4-HMAC-SHA256' },
{ 'x-amz-date': formattedDate },
];
if (additionalConditions) {
additionalConditions.forEach(field => {
const key = Object.keys(field)[0];
const value = field[key];
const index = conditionsFields.findIndex(condition => condition.hasOwnProperty(key));
if (index !== -1) {
conditionsFields[index][key] = value;
} else {
conditionsFields.push({ [key]: value });
}
});
}
const policy = {
// 15 minutes from now
expiration: new Date(new Date().getTime() + 15 * 60 * 1000).toISOString(),
conditions: conditionsFields,
};
const policyBase64 = Buffer.from(JSON.stringify(policy)).toString('base64');
const signingKey = getSignatureKey(sk, shortFormattedDate, region, service);
const signature = crypto.createHmac('sha256', signingKey).update(policyBase64).digest('hex');
const returnFields = [
{ name: 'X-Amz-Credential', value: credential },
{ name: 'X-Amz-Algorithm', value: 'AWS4-HMAC-SHA256' },
{ name: 'X-Amz-Signature', value: signature },
{ name: 'X-Amz-Date', value: formattedDate },
{ name: 'Policy', value: policyBase64 },
{ name: 'bucket', value: bucketName },
{ name: 'key', value: filename },
];
if (!additionalConditions) {
return returnFields;
}
if (additionalConditions) {
additionalConditions.forEach(field => {
const key = Object.keys(field)[0];
const value = field[key];
const index = returnFields.findIndex(f => f.name === key);
if (index !== -1) {
returnFields[index].value = value;
} else {
returnFields.push({ name: key, value });
}
});
}
return returnFields;
};
describe('POST object', () => {
let bucketUtil;
let config;
const testContext = {};
before(() => {
config = getConfig('default');
ak = config.credentials.accessKeyId;
sk = config.credentials.secretAccessKey;
bucketUtil = new BucketUtility('default');
s3 = bucketUtil.s3;
});
beforeEach(done => {
const bucketName = generateBucketName();
const url = `${config.endpoint}/${bucketName}`;
testContext.bucketName = bucketName;
testContext.url = url;
const filePath = path.join(__dirname, filename);
const fileContent = 'This is a test file';
fs.writeFile(filePath, fileContent, err => {
if (err) {
return done(err);
}
// Create the bucket
return s3.createBucket({ Bucket: bucketName }, async (err) => {
if (err) {
return done(err);
}
return done();
});
});
});
afterEach(() => {
const { bucketName } = testContext;
const filePath = path.join(__dirname, filename);
// Delete the file
fs.unlink(filePath, err => {
if (err) {
throw err;
}
process.stdout.write('Emptying bucket');
return bucketUtil.empty(bucketName)
.then(() => {
process.stdout.write('Deleting bucket');
return bucketUtil.deleteOne(bucketName);
})
.catch(err => {
if (err.code !== 'NoSuchBucket') {
process.stdout.write('Error in afterEach');
throw err;
}
});
});
});
it('should successfully upload an object using a POST form', done => {
const { bucketName, url } = testContext;
const fields = calculateFields(ak, sk, bucketName);
const formData = new FormData();
fields.forEach(field => {
formData.append(field.name, field.value);
});
formData.append('file', fs.createReadStream(path.join(__dirname, filename)));
formData.getLength((err, length) => {
if (err) {
return done(err);
}
return axios.post(url, formData, {
headers: {
...formData.getHeaders(),
'Content-Length': length,
},
})
.then(response => {
assert.equal(response.status, 204);
done();
})
.catch(err => {
done(err);
});
});
});
it('should handle error when bucket does not exist', done => {
const fakeBucketName = generateBucketName();
const tempUrl = `${config.endpoint}/${fakeBucketName}`;
const fields = calculateFields(ak, sk, fakeBucketName);
const formData = new FormData();
fields.forEach(field => {
formData.append(field.name, field.value);
});
formData.append('file', fs.createReadStream(path.join(__dirname, filename)));
formData.getLength((err, length) => {
if (err) {
return done(err);
}
return axios.post(tempUrl, formData, {
headers: {
...formData.getHeaders(),
'Content-Length': length,
},
})
.then(() => {
done(new Error('Expected error but got success response'));
})
.catch(err => {
assert.equal(err.response.status, 404);
done();
});
});
});
it('should successfully upload a larger file to S3 using a POST form', done => {
const { bucketName, url } = testContext;
const largeFileName = 'large-test-file.txt';
const largeFilePath = path.join(__dirname, largeFileName);
const largeFileContent = 'This is a larger test file'.repeat(10000); // Simulate a larger file
fs.writeFile(largeFilePath, largeFileContent, err => {
if (err) {
return done(err);
}
const fields = calculateFields(ak, sk, bucketName, [{ key: largeFileName }]);
const formData = new FormData();
fields.forEach(field => {
formData.append(field.name, field.value);
});
formData.append('file', fs.createReadStream(largeFilePath));
return formData.getLength((err, length) => {
if (err) {
return done(err);
}
return axios.post(url, formData, {
headers: {
...formData.getHeaders(),
'Content-Length': length,
},
})
.then(response => {
assert.equal(response.status, 204);
s3.listObjectsV2({ Bucket: bucketName }, (err, data) => {
if (err) {
fs.unlink(largeFilePath, () => done(err)); // Clean up and propagate the error
return;
}
const uploadedFile = data.Contents.find(item => item.Key === path.basename(largeFileName));
assert(uploadedFile, 'Uploaded file should exist in the bucket');
assert.equal(uploadedFile.Size, Buffer.byteLength(largeFileContent),
'File size should match');
fs.unlink(largeFilePath, done); // Clean up the large file
});
})
.catch(err => {
fs.unlink(largeFilePath, () => done(err)); // Clean up and propagate the error
});
});
});
});
it('should be able to post an empty file and verify its existence', done => {
const { bucketName, url } = testContext;
const emptyFilePath = path.join(__dirname, 'empty-file.txt');
// Create an empty file
fs.writeFile(emptyFilePath, '', err => {
if (err) {
return done(err);
}
const fields = calculateFields(ak, sk, bucketName);
const formData = new FormData();
fields.forEach(field => {
formData.append(field.name, field.value);
});
formData.append('file', fs.createReadStream(emptyFilePath));
return formData.getLength((err, length) => {
if (err) {
return done(err);
}
return axios.post(url, formData, {
headers: {
...formData.getHeaders(),
'Content-Length': length,
},
})
.then(response => {
assert.equal(response.status, 204);
// Check if the object exists using listObjects
return s3.listObjects({ Bucket: bucketName, Prefix: filename }, (err, data) => {
if (err) {
return done(err);
}
const fileExists = data.Contents.some(item => item.Key === filename);
const file = data.Contents.find(item => item.Key === filename);
assert.equal(file.Size, 0);
if (!fileExists) {
return done(new Error('File does not exist in S3'));
}
// Clean up: delete the empty file locally and from S3
return fs.unlink(emptyFilePath, err => {
if (err) {
return done(err);
}
return s3.deleteObject({ Bucket: bucketName, Key: filename }, err => {
if (err) {
return done(err);
}
return done();
});
});
});
})
.catch(err => {
done(err);
});
});
});
});
it('should handle error when file is missing', done => {
const { bucketName, url } = testContext;
const fields = calculateFields(ak, sk, bucketName);
const formData = new FormData();
fields.forEach(field => {
formData.append(field.name, field.value);
});
formData.getLength((err, length) => {
if (err) {
return done(err);
}
return axios.post(url, formData, {
headers: {
...formData.getHeaders(),
'Content-Length': length,
},
})
.then(() => {
done(new Error('Expected error but got success response'));
})
.catch(err => {
assert.equal(err.response.status, 400);
done();
});
});
});
it('should upload an object with key slash', done => {
const { bucketName, url } = testContext;
const slashKey = '/';
const fields = calculateFields(ak, sk, bucketName, [{ key: slashKey }]);
const formData = new FormData();
fields.forEach(field => {
formData.append(field.name, field.value);
});
formData.append('file', fs.createReadStream(path.join(__dirname, filename)));
formData.getLength((err, length) => {
if (err) {
return done(err);
}
return axios.post(url, formData, {
headers: {
...formData.getHeaders(),
'Content-Length': length,
},
})
.then(response => {
assert.equal(response.status, 204);
done();
})
.catch(err => {
done(err);
});
});
});
it('should return an error if form data (excluding file) exceeds 20KB', done => {
const { bucketName, url } = testContext;
const fields = calculateFields(ak, sk, bucketName);
// Add additional fields to make form data exceed 20KB
const largeValue = 'A'.repeat(1024); // 1KB value
for (let i = 0; i < 21; i++) { // Add 21 fields of 1KB each to exceed 20KB
fields.push({ name: `field${i}`, value: largeValue });
}
const formData = new FormData();
fields.forEach(field => {
formData.append(field.name, field.value);
});
formData.append('file', fs.createReadStream(path.join(__dirname, filename)));
return formData.getLength((err, length) => {
if (err) {
return done(err);
}
return axios.post(url, formData, {
headers: {
...formData.getHeaders(),
'Content-Length': length,
},
})
.then(() => {
done(new Error('Request should not succeed with form data exceeding 20KB'));
})
.catch(err => {
assert.ok(err.response, 'Error should be returned by axios');
// Parse the XML error response
xml2js.parseString(err.response.data, (err, result) => {
if (err) {
return done(err);
}
const error = result.Error;
assert.equal(error.Code[0], 'MaxPostPreDataLengthExceeded');
assert.equal(error.Message[0],
'Your POST request fields preceeding the upload file was too large.');
return done();
});
});
});
});
it('should successfully upload an object with bucket versioning enabled and verify version ID', done => {
const { url, bucketName } = testContext;
// Enable versioning on the bucket
const versioningParams = {
Bucket: bucketName,
VersioningConfiguration: {
Status: 'Enabled',
},
};
return s3.putBucketVersioning(versioningParams, (err) => {
if (err) {
return done(err);
}
const fields = calculateFields(ak, sk, bucketName, [{ bucket: bucketName }]);
const formData = new FormData();
fields.forEach(field => {
formData.append(field.name, field.value);
});
formData.append('file', fs.createReadStream(path.join(__dirname, 'test-file.txt')));
return formData.getLength((err, length) => {
if (err) {
return done(err);
}
return axios.post(url, formData, {
headers: {
...formData.getHeaders(),
'Content-Length': length,
},
})
.then(response => {
assert.equal(response.status, 204);
// Verify version ID is present in the response
const versionId = response.headers['x-amz-version-id'];
assert.ok(versionId, 'Version ID should be present in the response headers');
done();
})
.catch(err => {
done(err);
});
});
});
});
it('should handle error when signature is invalid', done => {
const { url, bucketName } = testContext;
const fields = calculateFields(ak, sk, bucketName);
fields.push({ name: 'X-Amz-Signature', value: 'invalid-signature' });
const formData = new FormData();
fields.forEach(field => {
formData.append(field.name, field.value);
});
formData.append('file', fs.createReadStream(path.join(__dirname, 'test-file.txt')));
formData.getLength((err, length) => {
if (err) {
return done(err);
}
return axios.post(url, formData, {
headers: {
...formData.getHeaders(),
'Content-Length': length,
},
})
.then(() => done(new Error('Expected error but got success response')))
.catch(err => {
assert.equal(err.response.status, 403);
return xml2js.parseString(err.response.data, (err, result) => {
if (err) {
return done(err);
}
const error = result.Error;
assert.equal(error.Code[0], 'SignatureDoesNotMatch');
assert.equal(error.Message[0],
'The request signature we calculated does not match the signature you provided.');
return done();
});
});
});
});
it('should return an error when signature includes invalid data', done => {
const { url, bucketName } = testContext;
let fields = calculateFields(ak, sk, bucketName);
const laterThanNow = new Date(new Date().getTime() + 60000);
const shortFormattedDate = formatDate(laterThanNow);
const signingKey = getSignatureKey(sk, shortFormattedDate, 'ap-east-1', 's3');
const signature = crypto.createHmac('sha256', signingKey).update(fields.find(field =>
field.name === 'Policy').value).digest('hex');
// Modify the signature to be invalid
fields = fields.map(field => {
if (field.name === 'X-Amz-Signature') {
return { name: 'X-Amz-Signature', value: signature };
}
return field;
});
const formData = new FormData();
fields.forEach(field => {
formData.append(field.name, field.value);
});
formData.append('file', fs.createReadStream(path.join(__dirname, 'test-file.txt')));
return formData.getLength((err, length) => {
if (err) {
return done(err);
}
return axios.post(url, formData, {
headers: {
...formData.getHeaders(),
'Content-Length': length,
},
})
.then(() => done(new Error('Request should not succeed with an invalid signature')))
.catch(err => {
assert.ok(err.response, 'Error should be returned by axios');
// Parse the XML error response
return xml2js.parseString(err.response.data, (parseErr, result) => {
if (parseErr) {
return done(parseErr);
}
const error = result.Error;
assert.equal(
error.Code[0],
'SignatureDoesNotMatch',
'Expected SignatureDoesNotMatch error code'
);
return done();
});
});
});
});
it('should return an error for invalid keys', done => {
const { url, bucketName } = testContext;
const invalidAccessKeyId = 'INVALIDACCESSKEY';
const invalidSecretAccessKey = 'INVALIDSECRETKEY';
let fields = calculateFields(invalidAccessKeyId, invalidSecretAccessKey, bucketName);
// Modify the signature to be invalid
fields = fields.map(field => {
if (field.name === 'X-Amz-Signature') {
return { name: 'X-Amz-Signature', value: 'invalid-signature' };
}
return field;
});
const formData = new FormData();
fields.forEach(field => {
formData.append(field.name, field.value);
});
formData.append('file', fs.createReadStream(path.join(__dirname, 'test-file.txt')));
formData.getLength((err, length) => {
if (err) {
return done(err);
}
return axios.post(url, formData, {
headers: {
...formData.getHeaders(),
'Content-Length': length,
},
})
.then(() => done(new Error('Request should not succeed with invalid keys')))
.catch(err => {
assert.ok(err.response, 'Error should be returned by axios');
// Parse the XML error response
return xml2js.parseString(err.response.data, (parseErr, result) => {
if (parseErr) {
return done(parseErr);
}
const error = result.Error;
assert.equal(error.Code[0], 'InvalidAccessKeyId', 'Expected InvalidAccessKeyId error code');
return done();
});
});
});
});
it('should return an error for invalid credential', done => {
const { url, bucketName } = testContext;
let fields = calculateFields(ak, sk, bucketName);
const laterThanNow = new Date(new Date().getTime() + 60000);
const shortFormattedDate = formatDate(laterThanNow);
const credential = `${ak}/${shortFormattedDate}/eu-west-1/blabla/aws4_request`;
// Modify the signature to be invalid
fields = fields.map(field => {
if (field.name === 'X-Amz-Credential') {
return { name: 'X-Amz-Credential', value: credential };
}
return field;
});
const formData = new FormData();
fields.forEach(field => {
formData.append(field.name, field.value);
});
formData.append('file', fs.createReadStream(path.join(__dirname, 'test-file.txt')));
formData.getLength((err, length) => {
if (err) {
return done(err);
}
return axios.post(url, formData, {
headers: {
...formData.getHeaders(),
'Content-Length': length,
},
})
.then(() => done(new Error('Request should not succeed with an invalid credential')))
.catch(err => {
assert.ok(err.response, 'Error should be returned by axios');
// Parse the XML error response
return xml2js.parseString(err.response.data, (parseErr, result) => {
if (parseErr) {
return done(parseErr);
}
const error = result.Error;
assert.equal(error.Code[0], 'InvalidArgument', 'Expected InvalidArgument error code');
return done();
});
});
});
});
});

@@ -0,0 +1,439 @@
const assert = require('assert');
const async = require('async');
const moment = require('moment');
const { errors } = require('arsenal');
const sinon = require('sinon');
const { bucketPut } = require('../../../lib/api/bucketPut');
const bucketPutObjectLock = require('../../../lib/api/bucketPutObjectLock');
const bucketPutVersioning = require('../../../lib/api/bucketPutVersioning');
const { cleanup, DummyRequestLogger, makeAuthInfo, versioningTestUtils }
= require('../helpers');
const { ds } = require('arsenal').storage.data.inMemory.datastore;
const metadata = require('../metadataswitch');
const objectPost = require('../../../lib/api/objectPost');
const { objectLockTestUtils } = require('../helpers');
const DummyRequest = require('../DummyRequest');
const mpuUtils = require('../utils/mpuUtils');
const any = sinon.match.any;
const log = new DummyRequestLogger();
const canonicalID = 'accessKey1';
const authInfo = makeAuthInfo(canonicalID);
const bucketName = 'bucketname123';
const postBody = Buffer.from('I am a body', 'utf8');
const correctMD5 = 'be747eb4b75517bf6b3cf7c5fbb62f3a';
const mockDate = new Date(2050, 10, 12);
const testPutBucketRequest = new DummyRequest({
bucketName,
headers: { host: `${bucketName}.s3.amazonaws.com` },
url: '/',
});
const testPutBucketRequestLock = new DummyRequest({
bucketName,
headers: {
'host': `${bucketName}.s3.amazonaws.com`,
'x-amz-bucket-object-lock-enabled': 'true',
},
url: '/',
});
const originalputObjectMD = metadata.putObjectMD;
const objectName = 'objectName';
let testPostObjectRequest;
const enableVersioningRequest =
versioningTestUtils.createBucketPutVersioningReq(bucketName, 'Enabled');
const suspendVersioningRequest =
versioningTestUtils.createBucketPutVersioningReq(bucketName, 'Suspended');
describe('objectPost API', () => {
beforeEach(() => {
cleanup();
sinon.spy(metadata, 'putObjectMD');
testPostObjectRequest = new DummyRequest({
bucketName,
formData: {
bucket: bucketName,
key: objectName,
},
fileEventData: {},
headers: { host: `${bucketName}.s3.amazonaws.com` },
url: '/',
}, postBody);
});
afterEach(() => {
sinon.restore();
metadata.putObjectMD = originalputObjectMD;
});
it('should return an error if the bucket does not exist', done => {
objectPost(authInfo, testPostObjectRequest, undefined, log, err => {
assert.deepStrictEqual(err, errors.NoSuchBucket);
done();
});
});
it('should successfully post an object', done => {
const testPostObjectRequest = new DummyRequest({
bucketName,
formData: {
bucket: bucketName,
key: objectName,
},
fileEventData: {},
headers: {},
url: '/',
calculatedHash: 'vnR+tLdVF79rPPfF+7YvOg==',
}, postBody);
bucketPut(authInfo, testPutBucketRequest, log, () => {
objectPost(authInfo, testPostObjectRequest, undefined, log,
(err, resHeaders) => {
assert.strictEqual(resHeaders.ETag, `"${correctMD5}"`);
metadata.getObjectMD(bucketName, objectName,
{}, log, (err, md) => {
assert(md);
assert
.strictEqual(md['content-md5'], correctMD5);
done();
});
});
});
});
const mockModes = ['GOVERNANCE', 'COMPLIANCE'];
mockModes.forEach(mockMode => {
it(`should post an object with valid date & ${mockMode} mode`, done => {
const testPostObjectRequest = new DummyRequest({
bucketName,
formData: {
bucket: bucketName,
key: objectName,
},
fileEventData: {},
headers: {
'x-amz-object-lock-retain-until-date': mockDate,
'x-amz-object-lock-mode': mockMode,
},
url: '/',
calculatedHash: 'vnR+tLdVF79rPPfF+7YvOg==',
}, postBody);
bucketPut(authInfo, testPutBucketRequestLock, log, () => {
objectPost(authInfo, testPostObjectRequest, undefined, log,
(err, headers) => {
assert.ifError(err);
assert.strictEqual(headers.ETag, `"${correctMD5}"`);
metadata.getObjectMD(bucketName, objectName, {}, log,
(err, md) => {
const mode = md.retentionMode;
const retainUntilDate = md.retentionDate;
assert.ifError(err);
assert(md);
assert.strictEqual(mode, mockMode);
assert.strictEqual(retainUntilDate, mockDate);
done();
});
});
});
});
});
const formatTime = time => time.slice(0, 20);
const testObjectLockConfigs = [
{
testMode: 'COMPLIANCE',
val: 30,
type: 'Days',
},
{
testMode: 'GOVERNANCE',
val: 5,
type: 'Years',
},
];
testObjectLockConfigs.forEach(config => {
const { testMode, type, val } = config;
it('should put an object with default retention if object does not ' +
'have retention configuration but bucket has', done => {
const testPostObjectRequest = new DummyRequest({
bucketName,
formData: {
bucket: bucketName,
key: objectName,
},
fileEventData: {},
headers: {},
url: '/',
calculatedHash: 'vnR+tLdVF79rPPfF+7YvOg==',
}, postBody);
const testObjLockRequest = {
bucketName,
headers: { host: `${bucketName}.s3.amazonaws.com` },
post: objectLockTestUtils.generateXml(testMode, val, type),
};
bucketPut(authInfo, testPutBucketRequestLock, log, () => {
bucketPutObjectLock(authInfo, testObjLockRequest, log, () => {
objectPost(authInfo, testPostObjectRequest, undefined, log,
(err, headers) => {
assert.ifError(err);
assert.strictEqual(headers.ETag, `"${correctMD5}"`);
metadata.getObjectMD(bucketName, objectName, {},
log, (err, md) => {
const mode = md.retentionMode;
const retainDate = md.retentionDate;
const date = moment();
const days
= type === 'Days' ? val : val * 365;
const expectedDate
= date.add(days, 'days');
assert.ifError(err);
assert.strictEqual(mode, testMode);
assert.strictEqual(formatTime(retainDate),
formatTime(expectedDate.toISOString()));
done();
});
});
});
});
});
});
it('should successfully put an object with legal hold ON', done => {
const request = new DummyRequest({
bucketName,
formData: {
bucket: bucketName,
key: objectName,
},
fileEventData: {},
headers: {
'x-amz-object-lock-legal-hold': 'ON',
},
url: '/',
calculatedHash: 'vnR+tLdVF79rPPfF+7YvOg==',
}, postBody);
bucketPut(authInfo, testPutBucketRequestLock, log, () => {
objectPost(authInfo, request, undefined, log, (err, headers) => {
assert.ifError(err);
assert.strictEqual(headers.ETag, `"${correctMD5}"`);
metadata.getObjectMD(bucketName, objectName, {}, log,
(err, md) => {
assert.ifError(err);
assert.strictEqual(md.legalHold, true);
done();
});
});
});
});
it('should successfully put an object with legal hold OFF', done => {
const request = new DummyRequest({
bucketName,
formData: {
bucket: bucketName,
key: objectName,
},
fileEventData: {},
headers: {
'x-amz-object-lock-legal-hold': 'OFF',
},
url: '/',
calculatedHash: 'vnR+tLdVF79rPPfF+7YvOg==',
}, postBody);
bucketPut(authInfo, testPutBucketRequestLock, log, () => {
objectPost(authInfo, request, undefined, log, (err, headers) => {
assert.ifError(err);
assert.strictEqual(headers.ETag, `"${correctMD5}"`);
metadata.getObjectMD(bucketName, objectName, {}, log,
(err, md) => {
assert.ifError(err);
assert(md);
assert.strictEqual(md.legalHold, false);
done();
});
});
});
});
it('should not leave orphans in data when overwriting an object', done => {
const testPostObjectRequest2 = new DummyRequest({
bucketName,
formData: {
bucket: bucketName,
key: objectName,
},
fileEventData: {},
headers: {},
url: '/',
}, Buffer.from('I am another body', 'utf8'));
bucketPut(authInfo, testPutBucketRequest, log, () => {
objectPost(authInfo, testPostObjectRequest,
undefined, log, () => {
objectPost(authInfo, testPostObjectRequest2, undefined,
log,
() => {
// orphan objects don't get deleted
// until the next tick
// in memory
setImmediate(() => {
// Data store starts at index 1
assert.strictEqual(ds[0], undefined);
assert.strictEqual(ds[1], undefined);
assert.deepStrictEqual(ds[2].value,
Buffer.from('I am another body', 'utf8'));
done();
});
});
});
});
});
it('should not leave orphans in data when overwriting a multipart upload object', done => {
bucketPut(authInfo, testPutBucketRequest, log, () => {
mpuUtils.createMPU('default', bucketName, objectName, log,
(err, testUploadId) => {
objectPost(authInfo, testPostObjectRequest, undefined, log, err => {
assert.ifError(err);
sinon.assert.calledWith(metadata.putObjectMD,
any, any, any, sinon.match({ oldReplayId: testUploadId }), any, any);
done();
});
});
});
});
describe('objectPost API with versioning', () => {
beforeEach(() => {
cleanup();
});
const objData = ['foo0', 'foo1', 'foo2'].map(str =>
Buffer.from(str, 'utf8'));
const testPostObjectRequests = objData.map(data => versioningTestUtils
.createPostObjectRequest(bucketName, objectName, data));
it('should delete latest version when creating new null version ' +
'if latest version is null version', done => {
async.series([
callback => bucketPut(authInfo, testPutBucketRequest, log,
callback),
// putting null version by putting obj before versioning configured
callback => objectPost(authInfo, testPostObjectRequests[0], undefined,
log, err => {
versioningTestUtils.assertDataStoreValues(ds, [objData[0]]);
callback(err);
}),
callback => bucketPutVersioning(authInfo, suspendVersioningRequest,
log, callback),
// creating new null version by putting obj after ver suspended
callback => objectPost(authInfo, testPostObjectRequests[1],
undefined, log, err => {
// wait until next tick since mem backend executes
// deletes in the next tick
setImmediate(() => {
// old null version should be deleted
versioningTestUtils.assertDataStoreValues(ds,
[undefined, objData[1]]);
callback(err);
});
}),
// create another null version
callback => objectPost(authInfo, testPostObjectRequests[2],
undefined, log, err => {
setImmediate(() => {
// old null version should be deleted
versioningTestUtils.assertDataStoreValues(ds,
[undefined, undefined, objData[2]]);
callback(err);
});
}),
], done);
});
describe('when null version is not the latest version', () => {
const objData = ['foo0', 'foo1', 'foo2'].map(str =>
Buffer.from(str, 'utf8'));
const testPostObjectRequests = objData.map(data => versioningTestUtils
.createPostObjectRequest(bucketName, objectName, data));
beforeEach(done => {
async.series([
callback => bucketPut(authInfo, testPutBucketRequest, log,
callback),
// putting null version: put obj before versioning configured
callback => objectPost(authInfo, testPostObjectRequests[0],
undefined, log, callback),
callback => bucketPutVersioning(authInfo,
enableVersioningRequest, log, callback),
// put another version:
callback => objectPost(authInfo, testPostObjectRequests[1],
undefined, log, callback),
callback => bucketPutVersioning(authInfo,
suspendVersioningRequest, log, callback),
], err => {
if (err) {
return done(err);
}
versioningTestUtils.assertDataStoreValues(ds,
objData.slice(0, 2));
return done();
});
});
it('should still delete null version when creating new null version',
done => {
objectPost(authInfo, testPostObjectRequests[2], undefined,
log, err => {
assert.ifError(err, `Unexpected err: ${err}`);
setImmediate(() => {
// old null version should be deleted after putting
// new null version
versioningTestUtils.assertDataStoreValues(ds,
[undefined, objData[1], objData[2]]);
done(err);
});
});
});
});
it('should return BadDigest error and not leave orphans in data when ' +
'contentMD5 and completedHash do not match', done => {
const testPostObjectRequests = new DummyRequest({
bucketName,
formData: {
bucket: bucketName,
key: objectName,
},
fileEventData: {},
headers: {},
url: '/',
contentMD5: 'vnR+tLdVF79rPPfF+7YvOg==',
}, Buffer.from('I am another body', 'utf8'));
bucketPut(authInfo, testPutBucketRequest, log, () => {
objectPost(authInfo, testPostObjectRequests, undefined, log,
err => {
assert.deepStrictEqual(err, errors.BadDigest);
// orphan objects don't get deleted
// until the next tick
// in memory
setImmediate(() => {
// Data store starts at index 1
assert.strictEqual(ds[0], undefined);
assert.strictEqual(ds[1], undefined);
done();
});
});
});
});
});
});

@@ -374,6 +374,19 @@ const versioningTestUtils = {
};
return new DummyRequest(params, body);
},
createPostObjectRequest: (bucketName, keyName, body) => {
const params = {
bucketName,
formData: {
bucket: bucketName,
key: keyName,
},
fileEventData: {},
headers: {},
url: '/',
};
return new DummyRequest(params, body);
},
createBucketPutVersioningReq: (bucketName, status) => {
const request = {
bucketName,

@@ -16,6 +16,11 @@
enabled "2.0.x"
kuler "^2.0.0"
"@fastify/busboy@^2.1.1":
version "2.1.1"
resolved "https://registry.yarnpkg.com/@fastify/busboy/-/busboy-2.1.1.tgz#b9da6a878a371829a0502c9b6c1c143ef6663f4d"
integrity sha512-vBZP4NlzfOlerQTnba4aqZoMhE/a9HY7HRqoOPaETQcSQuWEIyZMHGfVu6w9wGtGK5fED5qRs2DteVCjOH60sA==
"@gar/promisify@^1.0.1": "@gar/promisify@^1.0.1":
version "1.1.3" version "1.1.3"
resolved "https://registry.yarnpkg.com/@gar/promisify/-/promisify-1.1.3.tgz#555193ab2e3bb3b6adc3d551c9c030d9e860daf6" resolved "https://registry.yarnpkg.com/@gar/promisify/-/promisify-1.1.3.tgz#555193ab2e3bb3b6adc3d551c9c030d9e860daf6"
@ -499,9 +504,9 @@ arraybuffer.slice@~0.0.7:
optionalDependencies: optionalDependencies:
ioctl "^2.0.2" ioctl "^2.0.2"
"arsenal@git+https://github.com/scality/arsenal#7.70.29": "arsenal@git+https://github.com/scality/arsenal#b00aea282244cd90efc745941e03d0be7e734fc7":
version "7.70.29" version "7.70.29"
resolved "git+https://github.com/scality/arsenal#a643a3e6ccbc49327339a285de1d4cb17afcd171" resolved "git+https://github.com/scality/arsenal#b00aea282244cd90efc745941e03d0be7e734fc7"
dependencies: dependencies:
"@js-sdsl/ordered-set" "^4.4.2" "@js-sdsl/ordered-set" "^4.4.2"
"@types/async" "^3.2.12" "@types/async" "^3.2.12"