Compare commits

...

5 Commits

Author SHA1 Message Date
williamlardier 8d867a59e2
Merge remote-tracking branch 'origin/improvement/CLDSRV-402-optimize-multiobjectdelete-api' into w/8.5/improvement/CLDSRV-402-optimize-multiobjectdelete-api 2023-06-15 14:10:36 +02:00
williamlardier d0daff7daa
CLDSRV-402: add unit tests 2023-06-15 13:55:28 +02:00
williamlardier 811e496829
[temp] CLDSRV-402: bump arsenal dep 2023-06-15 13:55:27 +02:00
williamlardier 03d19d89fa
CLDSRV-402: batch calls to storage services 2023-06-15 10:52:39 +02:00
williamlardier dee753e7cc
CLDSRV-402: optimize multideleteobject API
- Parallelism is increased to reduce the latency
- If the backend supports it, batching is used
2023-06-15 10:52:39 +02:00
8 changed files with 622 additions and 173 deletions
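Taken together, the change replaces per-object metadata GETs and per-object data deletes with one batched metadata read up front and one deferred data.batchDelete at the end, while raising the per-entry concurrency from 5 to 50. Below is a minimal standalone sketch of that flow, using plain Node callbacks; fetchAllMetadata and deleteAllLocations are hypothetical stand-ins for the real metadata and data wrappers.

const async = require('async');

// Hypothetical stand-ins for the metadata and data wrappers.
const fetchAllMetadata = (keys, cb) =>
    cb(null, Object.fromEntries(keys.map(k => [k, { key: k, location: [`loc-${k}`] }])));
const deleteAllLocations = (locations, cb) => cb(null, locations.length);

function batchedMultiDelete(objectKeys, done) {
    let locationsToDelete = [];
    async.waterfall([
        // 1. One batched metadata read instead of one GET per object.
        next => fetchAllMetadata(objectKeys, next),
        // 2. Process entries with higher parallelism (50 instead of 5);
        //    data locations are collected instead of being deleted inline.
        (cache, next) => async.forEachLimit(objectKeys, 50, (key, moveOn) => {
            const objMD = cache[key];
            locationsToDelete = locationsToDelete.concat(objMD.location);
            return moveOn();
        }, err => next(err)),
        // 3. One deferred batch delete against the data backend.
        next => deleteAllLocations(locationsToDelete, next),
    ], done);
}

batchedMultiDelete(['a', 'b', 'c'], (err, deletedCount) =>
    console.log('deleted locations:', deletedCount));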

View File

@ -200,6 +200,8 @@ const constants = {
validStorageClasses: [
'STANDARD',
],
// Backends that support metadata batching
supportsBatchingMethods: ['mongodb'],
};
module.exports = constants;
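For reference, this constant is checked against the configured metadata backend in the MultiObjectDelete code below; a tiny illustration of the gate, with a hypothetical config object standing in for config.backends.metadata:

const constants = { supportsBatchingMethods: ['mongodb'] };
// Hypothetical config shape; in CloudServer the value comes from config.backends.metadata.
const config = { backends: { metadata: 'mongodb' } };

const backendSupportsBatching =
    constants.supportsBatchingMethods.includes(config.backends.metadata);
console.log(backendSupportsBatching); // true only when running on the MongoDB backend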

View File

@ -17,12 +17,15 @@ const { preprocessingVersioningDelete }
= require('./apiUtils/object/versioning');
const createAndStoreObject = require('./apiUtils/object/createAndStoreObject');
const monitoring = require('../utilities/monitoringHandler');
const { metadataGetObject } = require('../metadata/metadataUtils');
const { metadataGetObject, metadataGetObjects } = require('../metadata/metadataUtils');
const { config } = require('../Config');
const { isRequesterNonAccountUser } = require('./apiUtils/authorization/permissionChecks');
const { hasGovernanceBypassHeader, checkUserGovernanceBypass, ObjectLockInfo }
= require('./apiUtils/object/objectLockHelpers');
const requestUtils = policies.requestUtils;
const { data } = require('../data/wrapper');
const logger = require('../utilities/logger');
const constants = require('../../constants');
const versionIdUtils = versioning.VersionID;
@ -167,6 +170,60 @@ function _parseXml(xmlToParse, next) {
});
}
/**
* processObjectVersion - decode the version id of an object to be deleted
* @param {object} entry - object entry from the delete request
* @param {string} bucketName - bucket name
* @return {array} - [error] if the version id cannot be decoded, or
* [null, decodedVersionId] on success
*/
function processObjectVersion(entry, bucketName) {
let decodedVersionId;
if (entry.versionId) {
decodedVersionId = entry.versionId === 'null' ?
'null' : versionIdUtils.decode(entry.versionId);
}
if (decodedVersionId instanceof Error) {
monitoring.promMetrics('DELETE', bucketName, 404,
'multiObjectDelete');
return [errors.NoSuchVersion];
}
return [null, decodedVersionId];
}
/**
* Initialization function for the MultiObjectDelete API that, based on the
* current metadata backend, assesses whether metadata READ batching is
* supported. If it is, the initialization step will call the metadataGetObjects
* function from the MetadataWrapper.
* @param {string} bucketName - bucket name
* @param {object[]} inPlay - list of object entries still in play
* @param {object} log - logger object
* @return {function} - function taking a callback, which either batch-fetches
* the objects' metadata (passing the resulting cache to the callback) or calls
* the callback immediately when batching is not supported
*/
function shouldBatchGetObjectsFromMetadata(bucketName, inPlay, log) {
const backendSupportsBatching = constants.supportsBatchingMethods.includes(config.backends.metadata);
// If the backend supports batching, we want to optimize the API latency by
// first getting all the objects' metadata, stored in memory, for later use
// in the API. This approach does not change the API architecture, but
// adds an extra step that can greatly improve the API latency when the
// database supports batching.
if (backendSupportsBatching) {
const objectKeys = Object.keys(inPlay).map(entry => {
const [err, versionId] = processObjectVersion(inPlay[entry], bucketName);
if (err) {
return null;
}
return {
versionId,
inPlay: inPlay[entry],
};
});
return callback => metadataGetObjects(bucketName, objectKeys, log, callback);
}
return callback => callback();
}
/**
* gets object metadata and deletes object
* @param {AuthInfo} authInfo - Instance of AuthInfo class with requester's info
@ -192,165 +249,184 @@ function getObjMetadataAndDelete(authInfo, canonicalID, request,
let numOfObjectsRemoved = 0;
const skipError = new Error('skip');
const objectLockedError = new Error('object locked');
let deleteFromStorage = [];
const initialStep = shouldBatchGetObjectsFromMetadata(bucketName, inPlay, log);
// doing 5 requests at a time. note that the data wrapper
// will do 5 parallel requests to data backend to delete parts
return async.forEachLimit(inPlay, 5, (entry, moveOn) => {
async.waterfall([
callback => {
let decodedVersionId;
if (entry.versionId) {
decodedVersionId = entry.versionId === 'null' ?
'null' : versionIdUtils.decode(entry.versionId);
}
if (decodedVersionId instanceof Error) {
monitoring.promMetrics('DELETE', bucketName, 404,
'multiObjectDelete');
return callback(errors.NoSuchVersion);
}
return callback(null, decodedVersionId);
},
// for obj deletes, no need to check acl's at object level
// (authority is at the bucket level for obj deletes)
(versionId, callback) => metadataGetObject(bucketName, entry.key,
versionId, log, (err, objMD) => {
// if general error from metadata return error
if (err) {
monitoring.promMetrics('DELETE', bucketName, err.code,
'multiObjectDelete');
return callback(err);
}
if (!objMD) {
const verCfg = bucket.getVersioningConfiguration();
// To adhere to AWS behavior, create a delete marker
// if trying to delete an object that does not exist
// when versioning has been configured
if (verCfg && !entry.versionId) {
log.debug('trying to delete specific version ' +
' that does not exist');
return callback(null, objMD, versionId);
return async.waterfall([
callback => initialStep((err, cache) => callback(err, cache)),
(cache, callback) => async.forEachLimit(inPlay, 50, (entry, moveOn) => {
async.waterfall([
callback => callback(...processObjectVersion(entry, bucketName)),
// for obj deletes, no need to check acl's at object level
// (authority is at the bucket level for obj deletes)
(versionId, callback) => metadataGetObject(bucketName, entry.key,
versionId, log, (err, objMD) => {
// if general error from metadata return error
if (err) {
monitoring.promMetrics('DELETE', bucketName, err.code,
'multiObjectDelete');
return callback(err);
}
// otherwise if particular key does not exist, AWS
// returns success for key so add to successfullyDeleted
// list and move on
successfullyDeleted.push({ entry });
return callback(skipError);
}
if (versionId && objMD.location &&
Array.isArray(objMD.location) && objMD.location[0]) {
// we need this information for data deletes to AWS
// eslint-disable-next-line no-param-reassign
objMD.location[0].deleteVersion = true;
}
return callback(null, objMD, versionId);
}),
(objMD, versionId, callback) => {
// AWS only returns an object lock error if a version id
// is specified, else continue to create a delete marker
if (!versionId || !bucket.isObjectLockEnabled()) {
return callback(null, null, objMD, versionId);
}
const hasGovernanceBypass = hasGovernanceBypassHeader(request.headers);
if (hasGovernanceBypass && isRequesterNonAccountUser(authInfo)) {
return checkUserGovernanceBypass(request, authInfo, bucket, entry.key, log, error => {
if (error && error.is.AccessDenied) {
log.debug('user does not have BypassGovernanceRetention and object is locked', { error });
return callback(objectLockedError);
if (!objMD) {
const verCfg = bucket.getVersioningConfiguration();
// To adhere to AWS behavior, create a delete marker
// if trying to delete an object that does not exist
// when versioning has been configured
if (verCfg && !entry.versionId) {
log.debug('trying to delete specific version ' +
' that does not exist');
return callback(null, objMD, versionId);
}
// otherwise if particular key does not exist, AWS
// returns success for key so add to successfullyDeleted
// list and move on
successfullyDeleted.push({ entry });
return callback(skipError);
}
if (error) {
return callback(error);
if (versionId && objMD.location &&
Array.isArray(objMD.location) && objMD.location[0]) {
// we need this information for data deletes to AWS
// eslint-disable-next-line no-param-reassign
objMD.location[0].deleteVersion = true;
}
return callback(null, hasGovernanceBypass, objMD, versionId);
return callback(null, objMD, versionId);
}, cache ? cache[`${entry.key}${versionId}`] : null),
(objMD, versionId, callback) => {
// AWS only returns an object lock error if a version id
// is specified, else continue to create a delete marker
if (!versionId || !bucket.isObjectLockEnabled()) {
return callback(null, null, objMD, versionId);
}
const hasGovernanceBypass = hasGovernanceBypassHeader(request.headers);
if (hasGovernanceBypass && isRequesterNonAccountUser(authInfo)) {
return checkUserGovernanceBypass(request, authInfo, bucket, entry.key, log, error => {
if (error && error.is.AccessDenied) {
log.debug('user does not have BypassGovernanceRetention and object is locked',
{ error });
return callback(objectLockedError);
}
if (error) {
return callback(error);
}
return callback(null, hasGovernanceBypass, objMD, versionId);
});
}
return callback(null, hasGovernanceBypass, objMD, versionId);
},
(hasGovernanceBypass, objMD, versionId, callback) => {
// AWS only returns an object lock error if a version id
// is specified, else continue to create a delete marker
if (!versionId || !bucket.isObjectLockEnabled()) {
return callback(null, objMD, versionId);
}
const objLockInfo = new ObjectLockInfo({
mode: objMD.retentionMode,
date: objMD.retentionDate,
legalHold: objMD.legalHold || false,
});
}
return callback(null, hasGovernanceBypass, objMD, versionId);
},
(hasGovernanceBypass, objMD, versionId, callback) => {
// AWS only returns an object lock error if a version id
// is specified, else continue to create a delete marker
if (!versionId || !bucket.isObjectLockEnabled()) {
return callback(null, objMD, versionId);
}
const objLockInfo = new ObjectLockInfo({
mode: objMD.retentionMode,
date: objMD.retentionDate,
legalHold: objMD.legalHold || false,
});
// If the object can not be deleted raise an error
if (!objLockInfo.canModifyObject(hasGovernanceBypass)) {
log.debug('trying to delete locked object');
return callback(objectLockedError);
}
return callback(null, objMD, versionId);
},
(objMD, versionId, callback) => {
const options = preprocessingVersioningDelete(
bucketName, bucket, objMD, versionId, config.nullVersionCompatMode);
const deleteInfo = {};
if (options && options.deleteData) {
deleteInfo.deleted = true;
if (objMD.uploadId) {
// eslint-disable-next-line
options.replayId = objMD.uploadId;
// If the object can not be deleted raise an error
if (!objLockInfo.canModifyObject(hasGovernanceBypass)) {
log.debug('trying to delete locked object');
return callback(objectLockedError);
}
return services.deleteObject(bucketName, objMD,
entry.key, options, log, err =>
callback(err, objMD, deleteInfo));
return callback(null, objMD, versionId);
},
(objMD, versionId, callback) => {
const options = preprocessingVersioningDelete(
bucketName, bucket, objMD, versionId, config.nullVersionCompatMode);
const deleteInfo = {};
if (options && options.deleteData) {
deleteInfo.deleted = true;
if ((bucket.getLifecycleConfiguration && !bucket.getLifecycleConfiguration())
&& (bucket.getNotificationConfiguration && !bucket.getNotificationConfiguration())) {
options.shouldOnlyDelete = true;
}
if (objMD.uploadId) {
// eslint-disable-next-line
options.replayId = objMD.uploadId;
}
return services.deleteObject(bucketName, objMD,
entry.key, options, log, (err, toDelete) => {
if (err) {
return callback(err);
}
if (toDelete) {
deleteFromStorage = deleteFromStorage.concat(toDelete);
}
return callback(null, objMD, deleteInfo);
}, true);
}
deleteInfo.newDeleteMarker = true;
// This call will create a delete-marker
return createAndStoreObject(bucketName, bucket, entry.key,
objMD, authInfo, canonicalID, null, request,
deleteInfo.newDeleteMarker, null, log, (err, result) =>
callback(err, objMD, deleteInfo, result.versionId));
},
], (err, objMD, deleteInfo, versionId) => {
if (err === skipError) {
return moveOn();
} else if (err === objectLockedError) {
errorResults.push({ entry, error: errors.AccessDenied, objectLocked: true });
return moveOn();
} else if (err) {
log.error('error deleting object', { error: err, entry });
errorResults.push({ entry, error: err });
return moveOn();
}
deleteInfo.newDeleteMarker = true;
// This call will create a delete-marker
return createAndStoreObject(bucketName, bucket, entry.key,
objMD, authInfo, canonicalID, null, request,
deleteInfo.newDeleteMarker, null, log, (err, result) =>
callback(err, objMD, deleteInfo, result.versionId));
},
], (err, objMD, deleteInfo, versionId) => {
if (err === skipError) {
if (deleteInfo.deleted && objMD['content-length']) {
numOfObjectsRemoved++;
totalContentLengthDeleted += objMD['content-length'];
}
let isDeleteMarker;
let deleteMarkerVersionId;
// - If trying to delete an object that does not exist (if a new
// delete marker was created)
// - Or if an object exists but no version was specified
// return DeleteMarkerVersionId equals the versionID of the marker
// you just generated and DeleteMarker tag equals true
if (deleteInfo.newDeleteMarker) {
isDeleteMarker = true;
deleteMarkerVersionId = versionIdUtils.encode(versionId);
// In this case we are putting a new object (i.e., the delete
// marker), so we decrement the numOfObjectsRemoved value.
numOfObjectsRemoved--;
// If trying to delete a delete marker, DeleteMarkerVersionId equals
// deleteMarker's versionID and DeleteMarker equals true
} else if (objMD && objMD.isDeleteMarker) {
isDeleteMarker = true;
deleteMarkerVersionId = entry.versionId;
}
successfullyDeleted.push({ entry, isDeleteMarker,
deleteMarkerVersionId });
return moveOn();
} else if (err === objectLockedError) {
errorResults.push({ entry, error: errors.AccessDenied, objectLocked: true });
return moveOn();
} else if (err) {
log.error('error deleting object', { error: err, entry });
errorResults.push({ entry, error: err });
return moveOn();
}
if (deleteInfo.deleted && objMD['content-length']) {
numOfObjectsRemoved++;
totalContentLengthDeleted += objMD['content-length'];
}
let isDeleteMarker;
let deleteMarkerVersionId;
// - If trying to delete an object that does not exist (if a new
// delete marker was created)
// - Or if an object exists but no version was specified
// return DeleteMarkerVersionId equals the versionID of the marker
// you just generated and DeleteMarker tag equals true
if (deleteInfo.newDeleteMarker) {
isDeleteMarker = true;
deleteMarkerVersionId = versionIdUtils.encode(versionId);
// In this case we are putting a new object (i.e., the delete
// marker), so we decrement the numOfObjectsRemoved value.
numOfObjectsRemoved--;
// If trying to delete a delete marker, DeleteMarkerVersionId equals
// deleteMarker's versionID and DeleteMarker equals true
} else if (objMD && objMD.isDeleteMarker) {
isDeleteMarker = true;
deleteMarkerVersionId = entry.versionId;
}
successfullyDeleted.push({ entry, isDeleteMarker,
deleteMarkerVersionId });
return moveOn();
});
},
// end of forEach func
err => {
log.trace('finished deleting objects', { numOfObjectsRemoved });
return next(err, quietSetting, errorResults, numOfObjectsRemoved,
successfullyDeleted, totalContentLengthDeleted, bucket);
});
});
},
// end of forEach func
err => {
// Batch delete all objects
const onDone = () => callback(err, quietSetting, errorResults, numOfObjectsRemoved,
successfullyDeleted, totalContentLengthDeleted, bucket);
if (err && deleteFromStorage.length === 0) {
log.trace('no objects to delete from data backend');
return onDone();
}
// If error but we have objects in the list, delete them to ensure
// consistent state.
log.trace('deleting objects from data backend');
const deleteLog = logger.newRequestLoggerFromSerializedUids(log.getSerializedUids());
return data.batchDelete(deleteFromStorage, null, null, deleteLog, err => {
if (err) {
log.error('error deleting objects from data backend', { error: err });
return onDone();
}
return onDone();
});
}),
], next);
}
/**
@ -576,4 +652,6 @@ function multiObjectDelete(authInfo, request, log, callback) {
module.exports = {
getObjMetadataAndDelete,
multiObjectDelete,
processObjectVersion,
shouldBatchGetObjectsFromMetadata,
};
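The two moving parts introduced above are the initialStep thunk returned by shouldBatchGetObjectsFromMetadata and the per-entry cache lookup keyed by entry.key concatenated with the decoded versionId, which is passed into metadataGetObject. Below is a condensed, self-contained sketch of that wiring; getObjectsBatch and getOneObject are hypothetical stand-ins for metadataGetObjects and metadataGetObject.

const async = require('async');

// Hypothetical stand-ins for the metadata helpers.
const getObjectsBatch = (entries, cb) =>
    cb(null, Object.fromEntries(entries.map(e =>
        [`${e.key}${e.versionId || ''}`, { key: e.key, cached: true }])));
const getOneObject = (key, versionId, cb, cachedDoc) =>
    (cachedDoc ? cb(null, cachedDoc) : cb(null, { key, cached: false }));

function buildInitialStep(entries, backendSupportsBatching) {
    if (backendSupportsBatching) {
        return callback => getObjectsBatch(entries, callback);
    }
    // No batching support: no cache, objects will be fetched one by one.
    return callback => callback();
}

function deleteMany(entries, backendSupportsBatching, done) {
    const initialStep = buildInitialStep(entries, backendSupportsBatching);
    async.waterfall([
        callback => initialStep((err, cache) => callback(err, cache)),
        (cache, callback) => async.forEachLimit(entries, 50, (entry, moveOn) => {
            // The cached document, when present, short-circuits the per-object GET.
            const cacheKey = `${entry.key}${entry.versionId || ''}`;
            getOneObject(entry.key, entry.versionId, (err, objMD) => {
                console.log(entry.key, objMD.cached ? 'served from cache' : 'fetched individually');
                moveOn(err);
            }, cache ? cache[cacheKey] : null);
        }, callback),
    ], done);
}

deleteMany([{ key: 'a' }, { key: 'b', versionId: 'v1' }], true, () => {});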

View File

@ -62,11 +62,16 @@ function getNullVersionFromMaster(bucketName, objectKey, log, cb) {
* @param {string} [versionId] - version of object to retrieve
* @param {RequestLogger} log - request logger
* @param {function} cb - callback
* @param {object} [cachedDocument] - pre-fetched object metadata document; when
* provided it is returned directly, skipping the metadata backend call
* @return {undefined} - and call callback with err, bucket md and object md
*/
function metadataGetObject(bucketName, objectKey, versionId, log, cb) {
function metadataGetObject(bucketName, objectKey, versionId, log, cb, cachedDocument = null) {
// versionId may be 'null', which asks metadata to fetch the null key specifically
const options = { versionId, getDeleteMarker: true };
if (cachedDocument) {
return cb(null, cachedDocument);
}
return metadata.getObjectMD(bucketName, objectKey, options, log,
(err, objMD) => {
if (err) {
@ -84,6 +89,40 @@ function metadataGetObject(bucketName, objectKey, versionId, log, cb) {
});
}
/** metadataGetObjects - retrieves the specified objects or versions from
* metadata in a single batch. This method uses cursors, hence is only
* compatible with a MongoDB metadata backend.
* @param {string} bucketName - name of bucket
* @param {object[]} objectsKeys - list of entries holding the object key
* (inPlay.key) and version id of each object to retrieve
* @param {RequestLogger} log - request logger
* @param {function} cb - callback
* @return {undefined} - and call callback with err and a map of object
* metadata documents keyed by object key concatenated with version id
*/
function metadataGetObjects(bucketName, objectsKeys, log, cb) {
const options = { getDeleteMarker: true };
const objects = objectsKeys.map(objectKey => ({
key: objectKey ? objectKey.inPlay.key : null,
params: options,
versionId: objectKey ? objectKey.versionId : null,
}));
// Returned objects follow the format { key, doc, versionId }, which is
// required with batching to properly map results back to the requested objects
return metadata.getObjectsMD(bucketName, objects, log, (err, objMds) => {
if (err) {
log.debug('err getting object MD from metadata', { error: err });
return cb(err);
}
const result = {};
objMds.forEach(objMd => {
if (objMd.doc) {
result[`${objMd.doc.key}${objMd.versionId}`] = objMd.doc;
}
});
return cb(null, result);
});
}
/**
* Validate that a bucket is accessible and authorized to the user,
* return a specific error code otherwise
@ -215,6 +254,7 @@ function metadataValidateBucket(params, log, callback) {
module.exports = {
validateBucket,
metadataGetObject,
metadataGetObjects,
metadataValidateBucketAndObj,
metadataValidateBucket,
};
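The batched helper returns a plain map keyed by the object key concatenated with the version id, which is exactly what metadataGetObject's new cachedDocument argument consumes. A small sketch of that reduction, using hypothetical documents shaped like the { key, doc, versionId } entries described in the comment above:

// Hypothetical batch-read results: each entry is { key, doc, versionId },
// with doc set to null when the object was not found.
const objMds = [
    { key: 'a.txt', versionId: 'v1', doc: { key: 'a.txt', 'content-length': 10 } },
    { key: 'b.txt', versionId: '', doc: { key: 'b.txt', 'content-length': 20 } },
    { key: 'missing.txt', versionId: 'v9', doc: null }, // not found: skipped
];

// Same reduction as metadataGetObjects: keep only found documents,
// keyed by `${key}${versionId}` so callers can look them up per entry.
const result = {};
objMds.forEach(objMd => {
    if (objMd.doc) {
        result[`${objMd.doc.key}${objMd.versionId}`] = objMd.doc;
    }
});

console.log(Object.keys(result)); // [ 'a.txtv1', 'b.txt' ]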

View File

@ -308,9 +308,12 @@ const services = {
* delete a specific version of the object
* @param {Log} log - logger instance
* @param {function} cb - callback from async.waterfall in objectGet
* @param {boolean} deferDeletion - true if the object data should not be
* removed from storage immediately; its locations are then returned to the
* caller for a deferred batch deletion
* @return {undefined}
*/
deleteObject(bucketName, objectMD, objectKey, options, log, cb) {
deleteObject(bucketName, objectMD, objectKey, options, log, cb, deferDeletion) {
log.trace('deleting object from bucket');
assert.strictEqual(typeof bucketName, 'string');
assert.strictEqual(typeof objectMD, 'object');
@ -327,12 +330,16 @@ const services = {
log.getSerializedUids());
if (objectMD.location === null) {
return cb(null, res);
} else if (!Array.isArray(objectMD.location)) {
data.delete(objectMD.location, deleteLog);
return cb(null, res);
}
return data.batchDelete(objectMD.location, null, null,
deleteLog, err => {
const locations = Array.isArray(objectMD.location)
? objectMD.location : [objectMD.location];
if (deferDeletion) {
return cb(null, locations);
}
return data.batchDelete(locations, null, null, deleteLog, err => {
if (err) {
return cb(err);
}
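With deferDeletion set, deleteObject no longer issues a data.batchDelete per object: it normalizes the location metadata into an array and returns it, so the caller can delete everything in one batch at the end (as getObjMetadataAndDelete does with its deleteFromStorage accumulator). A minimal sketch of the two modes, with a stubbed data wrapper (parameter names on the stub are placeholders):

// Stubbed data wrapper: deletes a list of locations in a single call.
const data = {
    batchDelete: (locations, arg2, arg3, log, cb) => {
        console.log('batch deleting', locations.length, 'locations');
        return cb(null);
    },
};

function deleteObjectData(objectMD, deferDeletion, cb) {
    // Normalize a single-location value into an array, as in the diff above.
    const locations = Array.isArray(objectMD.location)
        ? objectMD.location : [objectMD.location];
    if (deferDeletion) {
        // Deferred mode: hand the locations back for a later batch delete.
        return cb(null, locations);
    }
    // Immediate mode: delete this object's data right away.
    return data.batchDelete(locations, null, null, null, cb);
}

// Caller side, accumulating deferred locations across many objects:
let deleteFromStorage = [];
deleteObjectData({ location: [{ key: 'loc1' }, { key: 'loc2' }] }, true, (err, toDelete) => {
    if (toDelete) {
        deleteFromStorage = deleteFromStorage.concat(toDelete);
    }
    // ...once every object has been processed, a single final call:
    data.batchDelete(deleteFromStorage, null, null, null, () => {});
});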

View File

@ -20,7 +20,7 @@
"homepage": "https://github.com/scality/S3#readme",
"dependencies": {
"@hapi/joi": "^17.1.0",
"arsenal": "git+https://github.com/scality/arsenal#8.1.87",
"arsenal": "git+https://github.com/scality/arsenal#74d70291c05dae09806b785017556717cc1d8214",
"async": "~2.5.0",
"aws-sdk": "2.905.0",
"azure-storage": "^2.1.0",

View File

@ -1,7 +1,7 @@
const assert = require('assert');
const { errors, storage } = require('arsenal');
const { getObjMetadataAndDelete }
const { processObjectVersion, getObjMetadataAndDelete, shouldBatchGetObjectsFromMetadata }
= require('../../../lib/api/multiObjectDelete');
const { cleanup, DummyRequestLogger, makeAuthInfo } = require('../helpers');
const DummyRequest = require('../DummyRequest');
@ -11,6 +11,7 @@ const objectPut = require('../../../lib/api/objectPut');
const { metadata } = storage.metadata.inMemory.metadata;
const { ds } = storage.data.inMemory.datastore;
const constants = require('../../../constants');
const log = new DummyRequestLogger();
const canonicalID = 'accessKey1';
const authInfo = makeAuthInfo(canonicalID);
@ -186,3 +187,48 @@ describe('getObjMetadataAndDelete function for multiObjectDelete', () => {
});
});
});
describe('shouldBatchGetObjectsFromMetadata', () => {
it('should return a call to the batching method if the backend supports it', done => {
constants.supportsBatchingMethods.push('mem');
const returnedCallback = shouldBatchGetObjectsFromMetadata(bucketName, [], log);
returnedCallback(err => {
assert.strictEqual(err.NotImplemented, true);
constants.supportsBatchingMethods.splice(-1, 1);
return done();
});
});
it('should not return a call to the batching method if the backend does not support it', done => {
const returnedCallback = shouldBatchGetObjectsFromMetadata(bucketName, [], log);
returnedCallback((err, cache) => {
assert.strictEqual(err, undefined);
assert.strictEqual(cache, undefined);
return done();
});
});
});
describe('processObjectVersion function helper', () => {
const bucketName = 'bucketName';
it('should return an error for invalid version IDs', () => {
const ret = processObjectVersion({
versionId: '\0',
}, bucketName);
assert(ret[0].is.NoSuchVersion);
});
it('should return "null" for null versionId', () => {
const ret = processObjectVersion({
versionId: 'null',
}, bucketName);
assert.strictEqual(ret[0], null);
assert.strictEqual(ret[1], 'null');
});
it('should return null error on success', () => {
const ret = processObjectVersion({}, bucketName);
assert.ifError(ret[0]);
assert.deepStrictEqual(ret[1], undefined);
});
});

View File

@ -1,4 +1,5 @@
const assert = require('assert');
const sinon = require('sinon');
const { models } = require('arsenal');
const { BucketInfo } = models;
@ -13,7 +14,8 @@ const bucket = new BucketInfo('niftyBucket', ownerCanonicalId,
authInfo.getAccountDisplayName(), creationDate);
const log = new DummyRequestLogger();
const { validateBucket } = require('../../../lib/metadata/metadataUtils');
const { validateBucket, metadataGetObjects, metadataGetObject } = require('../../../lib/metadata/metadataUtils');
const metadata = require('../../../lib/metadata/wrapper');
describe('validateBucket', () => {
it('action bucketPutPolicy by bucket owner', () => {
@ -53,3 +55,91 @@ describe('validateBucket', () => {
assert(validationResult.is.AccessDenied);
});
});
describe('metadataGetObjects', () => {
let sandbox;
const objectsKeys = [
{ inPlay: { key: 'objectKey1' }, versionId: 'versionId1' },
{ inPlay: { key: 'objectKey2' }, versionId: 'versionId2' },
];
beforeEach(() => {
sandbox = sinon.createSandbox();
});
afterEach(() => {
sandbox.restore();
});
it('should return error if metadata.getObjectsMD fails', done => {
const error = new Error('Failed to get object metadata');
sandbox.stub(metadata, 'getObjectsMD').yields(error);
metadataGetObjects('bucketName', objectsKeys, log, err => {
assert(err);
assert.strictEqual(err, error);
done();
});
});
it('should return object metadata if successful', done => {
const metadataObjs = [
{ doc: { key: 'objectKey1' }, versionId: 'versionId1' },
{ doc: { key: 'objectKey2' }, versionId: 'versionId2' },
];
sandbox.stub(metadata, 'getObjectsMD').yields(null, metadataObjs);
metadataGetObjects('bucketName', objectsKeys, log, (err, result) => {
assert.ifError(err);
assert(result);
assert.strictEqual(result.objectKey1versionId1, metadataObjs[0].doc);
assert.strictEqual(result.objectKey2versionId2, metadataObjs[1].doc);
done();
});
});
});
describe('metadataGetObject', () => {
let sandbox;
const objectKey = { inPlay: { key: 'objectKey1' }, versionId: 'versionId1' };
beforeEach(() => {
sandbox = sinon.createSandbox();
});
afterEach(() => {
sandbox.restore();
});
it('should return the cached document if provided', done => {
const cachedDoc = { key: 'objectKey1', versionId: 'versionId1' };
metadataGetObject('bucketName', objectKey.inPlay.key, objectKey.versionId, log, (err, result) => {
assert.ifError(err);
assert.deepStrictEqual(result, cachedDoc);
done();
}, cachedDoc);
});
it('should return error if metadata.getObjectMD fails', done => {
const error = new Error('Failed to get object metadata');
sandbox.stub(metadata, 'getObjectMD').yields(error);
metadataGetObject('bucketName', objectKey.inPlay.key, objectKey.versionId, log, err => {
assert(err);
assert.strictEqual(err, error);
done();
});
});
it('should return object metadata if successful', done => {
const metadataObj = { doc: { key: 'objectKey1', versionId: 'versionId1' } };
sandbox.stub(metadata, 'getObjectMD').yields(null, metadataObj);
metadataGetObject('bucketName', objectKey.inPlay.key, objectKey.versionId, log, (err, result) => {
assert.ifError(err);
assert.deepStrictEqual(result, metadataObj);
done();
});
});
});

yarn.lock (210 lines changed)
View File

@ -357,6 +357,11 @@
resolved "https://registry.yarnpkg.com/@sinonjs/text-encoding/-/text-encoding-0.7.2.tgz#5981a8db18b56ba38ef0efb7d995b12aa7b51918"
integrity sha512-sXXKG+uL9IrKqViTtao2Ws6dy0znu9sOaP1di/jKGW1M6VssO8vlpXCQcpZ+jisQ1tTFAC5Jo/EOzFbggBagFQ==
"@socket.io/component-emitter@~3.1.0":
version "3.1.0"
resolved "https://registry.yarnpkg.com/@socket.io/component-emitter/-/component-emitter-3.1.0.tgz#96116f2a912e0c02817345b3c10751069920d553"
integrity sha512-+9jVqKhRSpsc591z5vX+X5Yyw+he/HCB4iQ/RYxw35CEPaY1gnsNE43nf9n9AaYjAQrTiI/mOwKUKdUs9vf7Xg==
"@tootallnate/once@1":
version "1.1.2"
resolved "https://registry.yarnpkg.com/@tootallnate/once/-/once-1.1.2.tgz#ccb91445360179a04e7fe6aff78c00ffc1eeaf82"
@ -372,6 +377,18 @@
resolved "https://registry.yarnpkg.com/@types/async/-/async-3.2.15.tgz#26d4768fdda0e466f18d6c9918ca28cc89a4e1fe"
integrity sha512-PAmPfzvFA31mRoqZyTVsgJMsvbynR429UTTxhmfsUCrWGh3/fxOrzqBtaTPJsn4UtzTv4Vb0+/O7CARWb69N4g==
"@types/cookie@^0.4.1":
version "0.4.1"
resolved "https://registry.yarnpkg.com/@types/cookie/-/cookie-0.4.1.tgz#bfd02c1f2224567676c1545199f87c3a861d878d"
integrity sha512-XW/Aa8APYr6jSVVA1y/DEIZX0/GMKLEVekNG727R8cs56ahETkRAy/3DR7+fJyh7oUgGwNQaRfXCun0+KbWY7Q==
"@types/cors@^2.8.12":
version "2.8.13"
resolved "https://registry.yarnpkg.com/@types/cors/-/cors-2.8.13.tgz#b8ade22ba455a1b8cb3b5d3f35910fd204f84f94"
integrity sha512-RG8AStHlUiV5ysZQKq97copd2UmVYw3/pRMLefISZ3S1hK104Cwm7iLQ3fTKx+lsUH2CE8FlLaYeEA2LSeqYUA==
dependencies:
"@types/node" "*"
"@types/json5@^0.0.29":
version "0.0.29"
resolved "https://registry.yarnpkg.com/@types/json5/-/json5-0.0.29.tgz#ee28707ae94e11d2b827bcbe5270bcea7f3e71ee"
@ -390,6 +407,11 @@
resolved "https://registry.yarnpkg.com/@types/node/-/node-18.11.18.tgz#8dfb97f0da23c2293e554c5a50d61ef134d7697f"
integrity sha512-DHQpWGjyQKSHj3ebjFI/wRKcqQcdR+MoFBygntYOZytCqNfkd2ZC4ARDJ2DQqhjH5p85Nnd3jhUJIXrszFX/JA==
"@types/node@>=10.0.0":
version "20.3.1"
resolved "https://registry.yarnpkg.com/@types/node/-/node-20.3.1.tgz#e8a83f1aa8b649377bb1fb5d7bac5cb90e784dfe"
integrity sha512-EhcH/wvidPy1WeML3TtYFGR83UzjxeWRen9V402T8aUGYsCHOmfoisV3ZSg03gAFIbLq8TnWOJ0f4cALtnSEUg==
"@types/tunnel@^0.0.3":
version "0.0.3"
resolved "https://registry.yarnpkg.com/@types/tunnel/-/tunnel-0.0.3.tgz#f109e730b072b3136347561fc558c9358bb8c6e9"
@ -402,6 +424,19 @@
resolved "https://registry.yarnpkg.com/@types/utf8/-/utf8-3.0.1.tgz#bf081663d4fff05ee63b41f377a35f8b189f7e5b"
integrity sha512-1EkWuw7rT3BMz2HpmcEOr/HL61mWNA6Ulr/KdbXR9AI0A55wD4Qfv8hizd8Q1DnknSIzzDvQmvvY/guvX7jjZA==
"@types/webidl-conversions@*":
version "7.0.0"
resolved "https://registry.yarnpkg.com/@types/webidl-conversions/-/webidl-conversions-7.0.0.tgz#2b8e60e33906459219aa587e9d1a612ae994cfe7"
integrity sha512-xTE1E+YF4aWPJJeUzaZI5DRntlkY3+BCVJi0axFptnjGmAoWxkyREIh/XMrfxVLejwQxMCfDXdICo0VLxThrog==
"@types/whatwg-url@^8.2.1":
version "8.2.2"
resolved "https://registry.yarnpkg.com/@types/whatwg-url/-/whatwg-url-8.2.2.tgz#749d5b3873e845897ada99be4448041d4cc39e63"
integrity sha512-FtQu10RWgn3D9U4aazdwIE2yzphmTJREDqNdODHrbrZmmMqI0vMheC/6NE/J1Yveaj8H+ela+YwWTjq5PGmuhA==
dependencies:
"@types/node" "*"
"@types/webidl-conversions" "*"
JSONStream@^1.0.0:
version "1.3.5"
resolved "https://registry.yarnpkg.com/JSONStream/-/JSONStream-1.3.5.tgz#3208c1f08d3a4d99261ab64f92302bc15e111ca0"
@ -654,7 +689,6 @@ arraybuffer.slice@~0.0.7:
"arsenal@git+https://github.com/scality/Arsenal#8.1.87":
version "8.1.87"
uid ab0324da059c62171da4b9cf496dd067e22caac9
resolved "git+https://github.com/scality/Arsenal#ab0324da059c62171da4b9cf496dd067e22caac9"
dependencies:
"@azure/identity" "^3.1.1"
@ -695,9 +729,9 @@ arraybuffer.slice@~0.0.7:
optionalDependencies:
ioctl "^2.0.2"
"arsenal@git+https://github.com/scality/arsenal#8.1.87":
version "8.1.87"
resolved "git+https://github.com/scality/arsenal#ab0324da059c62171da4b9cf496dd067e22caac9"
"arsenal@git+https://github.com/scality/arsenal#74d70291c05dae09806b785017556717cc1d8214":
version "8.1.103"
resolved "git+https://github.com/scality/arsenal#74d70291c05dae09806b785017556717cc1d8214"
dependencies:
"@azure/identity" "^3.1.1"
"@azure/storage-blob" "^12.12.0"
@ -723,13 +757,13 @@ arraybuffer.slice@~0.0.7:
joi "^17.6.0"
level "~5.0.1"
level-sublevel "~6.6.5"
mongodb "^3.0.1"
mongodb "^5.2.0"
node-forge "^1.3.0"
prom-client "14.2.0"
simple-glob "^0.2.0"
socket.io "2.4.1"
socket.io-client "2.4.0"
sproxydclient scality/sproxydclient#8.0.7
socket.io "~4.6.1"
socket.io-client "~4.6.1"
sproxydclient "git+https://github.com/scality/sproxydclient#8.0.9"
utf8 "3.0.0"
uuid "^3.0.1"
werelogs scality/werelogs#8.1.2
@ -971,7 +1005,7 @@ base64-js@^1.0.2, base64-js@^1.3.1:
resolved "https://registry.yarnpkg.com/base64-js/-/base64-js-1.5.1.tgz#1b1b440160a5bf7ad40b650f095963481903930a"
integrity sha512-AKpaYlHn8t4SVbOHCy+b5+KKgvR4vrsD8vbvrbiQJps7fKDTkjkDry6ji0rUJjC0kzbNePLwzxq8iypo41qeWA==
base64id@2.0.0:
base64id@2.0.0, base64id@~2.0.0:
version "2.0.0"
resolved "https://registry.yarnpkg.com/base64id/-/base64id-2.0.0.tgz#2770ac6bc47d312af97a8bf9a634342e0cd25cb6"
integrity sha512-lGe34o6EHj9y3Kts9R4ZYs/Gr+6N7MCaMlIFA3F1R2O5/m7K06AxfSeO5530PEERE6/WyEg3lsuyw4GHlPZHog==
@ -1071,6 +1105,11 @@ bson@^1.1.4:
resolved "https://registry.yarnpkg.com/bson/-/bson-1.1.6.tgz#fb819be9a60cd677e0853aee4ca712a785d6618a"
integrity sha512-EvVNVeGo4tHxwi8L6bPj3y3itEvStdwvvlojVxxbyYfoaxJ6keLgrTuKdyfEAszFK+H3olzBuafE0yoh0D1gdg==
bson@^5.3.0:
version "5.3.0"
resolved "https://registry.yarnpkg.com/bson/-/bson-5.3.0.tgz#37b006df4cd91ed125cb686467c1dd6d4606b514"
integrity sha512-ukmCZMneMlaC5ebPHXIkP8YJzNl5DC41N5MAIvKDqLggdao342t4McltoJBQfQya/nHBWAcSsYRqlXPoQkTJag==
bson@~1.0.4:
version "1.0.9"
resolved "https://registry.yarnpkg.com/bson/-/bson-1.0.9.tgz#12319f8323b1254739b7c6bef8d3e89ae05a2f57"
@ -1405,6 +1444,14 @@ core-util-is@~1.0.0:
resolved "https://registry.yarnpkg.com/core-util-is/-/core-util-is-1.0.3.tgz#a6042d3634c2b27e9328f837b965fac83808db85"
integrity sha512-ZQBvi1DcpJ4GDqanjucZ2Hj3wEO5pZDS89BWbkcrvdxksJorwUDDZamX9ldFkp9aw2lmBDLgkObEA4DWNJ9FYQ==
cors@~2.8.5:
version "2.8.5"
resolved "https://registry.yarnpkg.com/cors/-/cors-2.8.5.tgz#eac11da51592dd86b9f06f6e7ac293b3df875d29"
integrity sha512-KIHbLJqu73RGr/hnbrO9uBeixNGuvSQjul/jdFvS/KFSIH1hWVd1ng7zOHx+YrEfInLG7q4n6GHQ9cDtxv/P6g==
dependencies:
object-assign "^4"
vary "^1"
cron-parser@^2.11.0, cron-parser@^2.15.0, cron-parser@^2.18.0:
version "2.18.0"
resolved "https://registry.yarnpkg.com/cron-parser/-/cron-parser-2.18.0.tgz#de1bb0ad528c815548371993f81a54e5a089edcf"
@ -1477,7 +1524,7 @@ debug@2.6.9, debug@^2.2.0, debug@^2.6.8, debug@^2.6.9:
dependencies:
ms "2.0.0"
debug@4, debug@^4.1.0, debug@^4.1.1, debug@^4.3.1, debug@^4.3.2, debug@^4.3.3:
debug@4, debug@^4.1.0, debug@^4.1.1, debug@^4.3.1, debug@^4.3.2, debug@^4.3.3, debug@~4.3.1, debug@~4.3.2:
version "4.3.4"
resolved "https://registry.yarnpkg.com/debug/-/debug-4.3.4.tgz#1319f6579357f2338d3337d2cdd4914bb5dcc865"
integrity sha512-PRWFHuSU3eDtQJPvnNY7Jcket1j0t5OuOsFzPPzsekD52Zl8qUfFIPEiswXqIvHWGVHOgX+7G/vCNNhehwxfkQ==
@ -1690,6 +1737,17 @@ engine.io-client@~3.5.0:
xmlhttprequest-ssl "~1.6.2"
yeast "0.1.2"
engine.io-client@~6.4.0:
version "6.4.0"
resolved "https://registry.yarnpkg.com/engine.io-client/-/engine.io-client-6.4.0.tgz#88cd3082609ca86d7d3c12f0e746d12db4f47c91"
integrity sha512-GyKPDyoEha+XZ7iEqam49vz6auPnNJ9ZBfy89f+rMMas8AuiMWOZ9PVzu8xb9ZC6rafUqiGHSCfu22ih66E+1g==
dependencies:
"@socket.io/component-emitter" "~3.1.0"
debug "~4.3.1"
engine.io-parser "~5.0.3"
ws "~8.11.0"
xmlhttprequest-ssl "~2.0.0"
engine.io-parser@~2.2.0:
version "2.2.1"
resolved "https://registry.yarnpkg.com/engine.io-parser/-/engine.io-parser-2.2.1.tgz#57ce5611d9370ee94f99641b589f94c97e4f5da7"
@ -1701,6 +1759,11 @@ engine.io-parser@~2.2.0:
blob "0.0.5"
has-binary2 "~1.0.2"
engine.io-parser@~5.0.3:
version "5.0.7"
resolved "https://registry.yarnpkg.com/engine.io-parser/-/engine.io-parser-5.0.7.tgz#ed5eae76c71f398284c578ab6deafd3ba7e4e4f6"
integrity sha512-P+jDFbvK6lE3n1OL+q9KuzdOFWkkZ/cMV9gol/SbVfpyqfvrfrFTOFJ6fQm2VC3PZHlU3QPhVwmbsCnauHF2MQ==
engine.io@~3.5.0:
version "3.5.0"
resolved "https://registry.yarnpkg.com/engine.io/-/engine.io-3.5.0.tgz#9d6b985c8a39b1fe87cd91eb014de0552259821b"
@ -1713,6 +1776,22 @@ engine.io@~3.5.0:
engine.io-parser "~2.2.0"
ws "~7.4.2"
engine.io@~6.4.2:
version "6.4.2"
resolved "https://registry.yarnpkg.com/engine.io/-/engine.io-6.4.2.tgz#ffeaf68f69b1364b0286badddf15ff633476473f"
integrity sha512-FKn/3oMiJjrOEOeUub2WCox6JhxBXq/Zn3fZOMCBxKnNYtsdKjxhl7yR3fZhM9PV+rdE75SU5SYMc+2PGzo+Tg==
dependencies:
"@types/cookie" "^0.4.1"
"@types/cors" "^2.8.12"
"@types/node" ">=10.0.0"
accepts "~1.3.4"
base64id "2.0.0"
cookie "~0.4.1"
cors "~2.8.5"
debug "~4.3.1"
engine.io-parser "~5.0.3"
ws "~8.11.0"
entities@~2.0.0:
version "2.0.3"
resolved "https://registry.yarnpkg.com/entities/-/entities-2.0.3.tgz#5c487e5742ab93c15abb5da22759b8590ec03b7f"
@ -4038,6 +4117,14 @@ moment-timezone@^0.5.31:
resolved "https://registry.yarnpkg.com/moment/-/moment-2.29.4.tgz#3dbe052889fe7c1b2ed966fcb3a77328964ef108"
integrity sha512-5LC9SOxjSc2HF6vO2CyuTDNivEdoz2IvyJJGj6X8DJ0eFyfszE0QiEd+iXmBvUP3WHxSjFH/vIsA0EN00cgr8w==
mongodb-connection-string-url@^2.6.0:
version "2.6.0"
resolved "https://registry.yarnpkg.com/mongodb-connection-string-url/-/mongodb-connection-string-url-2.6.0.tgz#57901bf352372abdde812c81be47b75c6b2ec5cf"
integrity sha512-WvTZlI9ab0QYtTYnuMLgobULWhokRjtC7db9LtcVfJ+Hsnyr5eo6ZtNAt3Ly24XZScGMelOcGtm7lSn0332tPQ==
dependencies:
"@types/whatwg-url" "^8.2.1"
whatwg-url "^11.0.0"
mongodb-core@2.1.20:
version "2.1.20"
resolved "https://registry.yarnpkg.com/mongodb-core/-/mongodb-core-2.1.20.tgz#fece8dd76b59ee7d7f2d313b65322c160492d8f1"
@ -4068,6 +4155,17 @@ mongodb@^3.0.1:
optionalDependencies:
saslprep "^1.0.0"
mongodb@^5.2.0:
version "5.6.0"
resolved "https://registry.yarnpkg.com/mongodb/-/mongodb-5.6.0.tgz#caff5278341bfc0f1ef6f394bb403d207de03d1e"
integrity sha512-z8qVs9NfobHJm6uzK56XBZF8XwM9H294iRnB7wNjF0SnY93si5HPziIJn+qqvUR5QOff/4L0gCD6SShdR/GtVQ==
dependencies:
bson "^5.3.0"
mongodb-connection-string-url "^2.6.0"
socks "^2.7.1"
optionalDependencies:
saslprep "^1.0.3"
ms@0.7.1:
version "0.7.1"
resolved "https://registry.yarnpkg.com/ms/-/ms-0.7.1.tgz#9cd13c03adbff25b65effde7ce864ee952017098"
@ -4287,6 +4385,11 @@ oauth-sign@~0.9.0:
resolved "https://registry.yarnpkg.com/oauth-sign/-/oauth-sign-0.9.0.tgz#47a7b016baa68b5fa0ecf3dee08a85c679ac6455"
integrity sha512-fexhUFFPTGV8ybAtSIGbV6gOkSv8UtRbDBnAyLQw4QPKkgNlsH2ByPGtMUqdWkos6YCRmAqViwgZrJc/mRDzZQ==
object-assign@^4:
version "4.1.1"
resolved "https://registry.yarnpkg.com/object-assign/-/object-assign-4.1.1.tgz#2109adc7965887cfc05cbbd442cac8bfbb360863"
integrity sha512-rJgTQnkUnH1sFw8yT6VSU3zD3sWmu6sZhIseY8VX+GRu3P6F7Fu+JNDoXfklElbLJSnc3FUQHVe4cU5hj+BcUg==
object-inspect@^1.12.0, object-inspect@^1.9.0:
version "1.12.2"
resolved "https://registry.yarnpkg.com/object-inspect/-/object-inspect-1.12.2.tgz#c0641f26394532f28ab8d796ab954e43c009a8ea"
@ -4915,7 +5018,7 @@ safe-stable-stringify@^2.3.1:
resolved "https://registry.yarnpkg.com/safer-buffer/-/safer-buffer-2.1.2.tgz#44fa161b0187b9549dd84bb91802f9bd8385cd6a"
integrity sha512-YZo3K82SD7Riyi0E1EQPojLz7kpepnSQI9IyPbHHg1XXXevb5dJI7tpyN2ADxGcQbHG7vcyRHk0cbwqcQriUtg==
saslprep@^1.0.0:
saslprep@^1.0.0, saslprep@^1.0.3:
version "1.0.3"
resolved "https://registry.yarnpkg.com/saslprep/-/saslprep-1.0.3.tgz#4c02f946b56cf54297e347ba1093e7acac4cf226"
integrity sha512-/MY/PEMbk2SuY5sScONwhUDsV2p77Znkb/q3nSVstq/yQzYJOH/Azh29p9oJLsl3LnQwSvZDKagDGBsBwSooag==
@ -5085,6 +5188,13 @@ socket.io-adapter@~1.1.0:
resolved "https://registry.yarnpkg.com/socket.io-adapter/-/socket.io-adapter-1.1.2.tgz#ab3f0d6f66b8fc7fca3959ab5991f82221789be9"
integrity sha512-WzZRUj1kUjrTIrUKpZLEzFZ1OLj5FwLlAFQs9kuZJzJi5DKdU7FsWc36SNmA8iDOtwBQyT8FkrriRM8vXLYz8g==
socket.io-adapter@~2.5.2:
version "2.5.2"
resolved "https://registry.yarnpkg.com/socket.io-adapter/-/socket.io-adapter-2.5.2.tgz#5de9477c9182fdc171cd8c8364b9a8894ec75d12"
integrity sha512-87C3LO/NOMc+eMcpcxUBebGjkpMDkNBS9tf7KJqcDsmL936EChtVva71Dw2q4tQcuVC+hAUy4an2NO/sYXmwRA==
dependencies:
ws "~8.11.0"
socket.io-client@2.4.0:
version "2.4.0"
resolved "https://registry.yarnpkg.com/socket.io-client/-/socket.io-client-2.4.0.tgz#aafb5d594a3c55a34355562fc8aea22ed9119a35"
@ -5102,6 +5212,16 @@ socket.io-client@2.4.0:
socket.io-parser "~3.3.0"
to-array "0.1.4"
socket.io-client@~4.6.1:
version "4.6.2"
resolved "https://registry.yarnpkg.com/socket.io-client/-/socket.io-client-4.6.2.tgz#2bfde952e74625d54e622718a7cb1d591ee62fd6"
integrity sha512-OwWrMbbA8wSqhBAR0yoPK6EdQLERQAYjXb3A0zLpgxfM1ZGLKoxHx8gVmCHA6pcclRX5oA/zvQf7bghAS11jRA==
dependencies:
"@socket.io/component-emitter" "~3.1.0"
debug "~4.3.2"
engine.io-client "~6.4.0"
socket.io-parser "~4.2.4"
socket.io-parser@~3.3.0:
version "3.3.2"
resolved "https://registry.yarnpkg.com/socket.io-parser/-/socket.io-parser-3.3.2.tgz#ef872009d0adcf704f2fbe830191a14752ad50b6"
@ -5120,6 +5240,14 @@ socket.io-parser@~3.4.0:
debug "~4.1.0"
isarray "2.0.1"
socket.io-parser@~4.2.4:
version "4.2.4"
resolved "https://registry.yarnpkg.com/socket.io-parser/-/socket.io-parser-4.2.4.tgz#c806966cf7270601e47469ddeec30fbdfda44c83"
integrity sha512-/GbIKmo8ioc+NIWIhwdecY0ge+qVBSMdgxGygevmdHj24bsfgtCmcUUcQ5ZzcylGFHsN3k4HB4Cgkl96KVnuew==
dependencies:
"@socket.io/component-emitter" "~3.1.0"
debug "~4.3.1"
socket.io@2.4.1:
version "2.4.1"
resolved "https://registry.yarnpkg.com/socket.io/-/socket.io-2.4.1.tgz#95ad861c9a52369d7f1a68acf0d4a1b16da451d2"
@ -5132,6 +5260,18 @@ socket.io@2.4.1:
socket.io-client "2.4.0"
socket.io-parser "~3.4.0"
socket.io@~4.6.1:
version "4.6.2"
resolved "https://registry.yarnpkg.com/socket.io/-/socket.io-4.6.2.tgz#d597db077d4df9cbbdfaa7a9ed8ccc3d49439786"
integrity sha512-Vp+lSks5k0dewYTfwgPT9UeGGd+ht7sCpB7p0e83VgO4X/AHYWhXITMrNk/pg8syY2bpx23ptClCQuHhqi2BgQ==
dependencies:
accepts "~1.3.4"
base64id "~2.0.0"
debug "~4.3.2"
engine.io "~6.4.2"
socket.io-adapter "~2.5.2"
socket.io-parser "~4.2.4"
socks-proxy-agent@^6.0.0:
version "6.2.1"
resolved "https://registry.yarnpkg.com/socks-proxy-agent/-/socks-proxy-agent-6.2.1.tgz#2687a31f9d7185e38d530bef1944fe1f1496d6ce"
@ -5149,6 +5289,14 @@ socks@^2.6.2:
ip "^2.0.0"
smart-buffer "^4.2.0"
socks@^2.7.1:
version "2.7.1"
resolved "https://registry.yarnpkg.com/socks/-/socks-2.7.1.tgz#d8e651247178fde79c0663043e07240196857d55"
integrity sha512-7maUZy1N7uo6+WVEX6psASxtNlKaNVMlGQKkG/63nEDdLOWNbiUMoLK7X4uYoLhQstau72mLgfEWcXcwsaHbYQ==
dependencies:
ip "^2.0.0"
smart-buffer "^4.2.0"
sorted-array-functions@^1.3.0:
version "1.3.0"
resolved "https://registry.yarnpkg.com/sorted-array-functions/-/sorted-array-functions-1.3.0.tgz#8605695563294dffb2c9796d602bd8459f7a0dd5"
@ -5202,6 +5350,14 @@ sprintf-js@~1.0.2:
resolved "https://registry.yarnpkg.com/sprintf-js/-/sprintf-js-1.0.3.tgz#04e6926f662895354f3dd015203633b857297e2c"
integrity sha512-D9cPgkvLlV3t3IzL0D0YLvGA9Ahk4PcvVwUbN0dSGr1aP0Nrt4AEnTUbuGvquEC0mA64Gqt1fzirlRs5ibXx8g==
"sproxydclient@git+https://github.com/scality/sproxydclient#8.0.9":
version "8.0.9"
resolved "git+https://github.com/scality/sproxydclient#117b53fb472fb95e06605a5f6872be068a659fda"
dependencies:
async "^3.2.0"
httpagent "github:scality/httpagent#1.0.6"
werelogs scality/werelogs#8.1.2
sproxydclient@scality/sproxydclient#8.0.7:
version "8.0.7"
resolved "https://codeload.github.com/scality/sproxydclient/tar.gz/46049c28d2487c8e4ea4c7dde2be2fc90dbcc9ec"
@ -5493,6 +5649,13 @@ tough-cookie@~2.5.0:
psl "^1.1.28"
punycode "^2.1.1"
tr46@^3.0.0:
version "3.0.0"
resolved "https://registry.yarnpkg.com/tr46/-/tr46-3.0.0.tgz#555c4e297a950617e8eeddef633c87d4d9d6cbf9"
integrity sha512-l7FvfAHlcmulp8kr+flpQZmVwtu7nfRV7NZujtN0OqES8EL4O4e0qqzL0DC5gAvx/ZC/9lk6rhcUwYvkBnBnYA==
dependencies:
punycode "^2.1.1"
tr46@~0.0.3:
version "0.0.3"
resolved "https://registry.yarnpkg.com/tr46/-/tr46-0.0.3.tgz#8184fd347dac9cdc185992f3a6622e14b9d9ab6a"
@ -5793,7 +5956,7 @@ validator@^13.0.0, validator@^13.6.0, validator@^13.7.0:
resolved "https://registry.yarnpkg.com/validator/-/validator-13.7.0.tgz#4f9658ba13ba8f3d82ee881d3516489ea85c0857"
integrity sha512-nYXQLCBkpJ8X6ltALua9dRrZDHVYxjJ1wgskNt1lH9fzGjs3tgojGSCBjmEPwkWS1y29+DrizMTW19Pr9uB2nw==
vary@~1.1.2:
vary@^1, vary@~1.1.2:
version "1.1.2"
resolved "https://registry.yarnpkg.com/vary/-/vary-1.1.2.tgz#2299f02c6ded30d4a5961b0b9f74524a18f634fc"
integrity sha512-BNGbWLfd0eUPabhkXUVm0j8uuvREyTh5ovRa/dyow/BqAbZJyC+5fU+IzQOzmAKzYqYRAISoRhdQr3eIZ/PXqg==
@ -5832,6 +5995,11 @@ webidl-conversions@^3.0.0:
resolved "https://registry.yarnpkg.com/webidl-conversions/-/webidl-conversions-3.0.1.tgz#24534275e2a7bc6be7bc86611cc16ae0a5654871"
integrity sha512-2JAn3z8AR6rjK8Sm8orRC0h/bcl/DqL7tRPdGZ4I1CjdF+EaMLmYxBHyXuKL849eucPFhvBoxMsflfOb8kxaeQ==
webidl-conversions@^7.0.0:
version "7.0.0"
resolved "https://registry.yarnpkg.com/webidl-conversions/-/webidl-conversions-7.0.0.tgz#256b4e1882be7debbf01d05f0aa2039778ea080a"
integrity sha512-VwddBukDzu71offAQR975unBIGqfKZpM+8ZX6ySk8nYhVoo5CYaZyzt3YBvYtRtO+aoGlqxPg/B87NGVZ/fu6g==
"werelogs@git+https://github.com/scality/werelogs#8.1.0":
version "8.1.0"
resolved "git+https://github.com/scality/werelogs#e8f828725642c54c511cdbe580b18f43d3589313"
@ -5857,6 +6025,14 @@ whatwg-mimetype@^2.3.0:
resolved "https://registry.yarnpkg.com/whatwg-mimetype/-/whatwg-mimetype-2.3.0.tgz#3d4b1e0312d2079879f826aff18dbeeca5960fbf"
integrity sha512-M4yMwr6mAnQz76TbJm914+gPpB/nCwvZbJU28cUD6dR004SAxDLOOSUaB1JDRqLtaOV/vi0IC5lEAGFgrjGv/g==
whatwg-url@^11.0.0:
version "11.0.0"
resolved "https://registry.yarnpkg.com/whatwg-url/-/whatwg-url-11.0.0.tgz#0a849eebb5faf2119b901bb76fd795c2848d4018"
integrity sha512-RKT8HExMpoYx4igMiVMY83lN6UeITKJlBQ+vR/8ZJ8OCdSiN3RwCq+9gH0+Xzj0+5IrM6i4j/6LuvzbZIQgEcQ==
dependencies:
tr46 "^3.0.0"
webidl-conversions "^7.0.0"
whatwg-url@^5.0.0:
version "5.0.0"
resolved "https://registry.yarnpkg.com/whatwg-url/-/whatwg-url-5.0.0.tgz#966454e8765462e37644d3626f6742ce8b70965d"
@ -5961,6 +6137,11 @@ ws@~7.4.2:
resolved "https://registry.yarnpkg.com/ws/-/ws-7.4.6.tgz#5654ca8ecdeee47c33a9a4bf6d28e2be2980377c"
integrity sha512-YmhHDO4MzaDLB+M9ym/mDA5z0naX8j7SIlT8f8z+I0VtzsRbekxEutHSme7NPS2qE8StCYQNUnfWdXta/Yu85A==
ws@~8.11.0:
version "8.11.0"
resolved "https://registry.yarnpkg.com/ws/-/ws-8.11.0.tgz#6a0d36b8edfd9f96d8b25683db2f8d7de6e8e143"
integrity sha512-HPG3wQd9sNQoT9xHyNCXoDUa+Xw/VevmY9FoHyQ+g+rrMn4j6FB4np7Z0OhdTgjx6MgQLK7jwSy1YecU1+4Asg==
xml2js@0.4.19:
version "0.4.19"
resolved "https://registry.yarnpkg.com/xml2js/-/xml2js-0.4.19.tgz#686c20f213209e94abf0d1bcf1efaa291c7827a7"
@ -6004,6 +6185,11 @@ xmlhttprequest-ssl@~1.6.2:
resolved "https://registry.yarnpkg.com/xmlhttprequest-ssl/-/xmlhttprequest-ssl-1.6.3.tgz#03b713873b01659dfa2c1c5d056065b27ddc2de6"
integrity sha512-3XfeQE/wNkvrIktn2Kf0869fC0BN6UpydVasGIeSm2B1Llihf7/0UfZM+eCkOw3P7bP4+qPgqhm7ZoxuJtFU0Q==
xmlhttprequest-ssl@~2.0.0:
version "2.0.0"
resolved "https://registry.yarnpkg.com/xmlhttprequest-ssl/-/xmlhttprequest-ssl-2.0.0.tgz#91360c86b914e67f44dce769180027c0da618c67"
integrity sha512-QKxVRxiRACQcVuQEYFsI1hhkrMlrXHPegbbd1yn9UHOmRxY+si12nQYzri3vbzt8VdTTRviqcKxcyllFas5z2A==
xtend@^4.0.2, xtend@~4.0.0:
version "4.0.2"
resolved "https://registry.yarnpkg.com/xtend/-/xtend-4.0.2.tgz#bb72779f5fa465186b1f438f674fa347fdb5db54"