Compare commits
19 Commits
developmen ... improvemen
Author | SHA1 | Date
---|---|---
Taylor McKinnon | 69a203f6ea |
Rahul Padigela | 93ccc3df5b |
Taylor McKinnon | 53d143efa7 |
Taylor McKinnon | 2ec6968565 |
Taylor McKinnon | 5bdc7b97d4 |
tmacro | 4eb77f9327 |
Taylor McKinnon | 5239f013d9 |
Taylor McKinnon | 49d1d65f37 |
Taylor McKinnon | 5cdcee201b |
Jonathan Gramain | 121352ebfb |
Jonathan Gramain | d5e2a7a894 |
Nicolas Humbert | de66293473 |
Nicolas Humbert | 067644b8db |
Maha Benzekri | c0dfd6fe5e |
benzekrimaha | 962dede838 |
Maha Benzekri | 85fb1fe606 |
Maha Benzekri | 570602b902 |
Maha Benzekri | 3af5d1b692 |
Maha Benzekri | 677605e48c |
@@ -23,10 +23,3 @@ runs:
     - name: install dependencies
      shell: bash
      run: yarn install --ignore-engines --frozen-lockfile --network-concurrency 1
-    - uses: actions/cache@v2
-      with:
-        path: ~/.cache/pip
-        key: ${{ runner.os }}-pip
-    - name: Install python deps
-      shell: bash
-      run: pip install docker-compose
@@ -34,4 +34,4 @@ gcpbackendmismatch_GCP_SERVICE_KEY
 gcpbackend_GCP_SERVICE_KEYFILE
 gcpbackendmismatch_GCP_SERVICE_KEYFILE
 gcpbackendnoproxy_GCP_SERVICE_KEYFILE
-gcpbackendproxy_GCP_SERVICE_KEYFILE
+gcpbackendproxy_GCP_SERVICE_KEYFILE
@@ -184,7 +184,7 @@ jobs:
      - name: Setup CI environment
        uses: ./.github/actions/setup-ci
      - name: Setup CI services
-        run: docker-compose up -d
+        run: docker compose up -d
        working-directory: .github/docker
      - name: Run multiple backend test
        run: |-
@@ -226,9 +226,7 @@ jobs:
        uses: actions/checkout@v3
      - uses: actions/setup-python@v4
        with:
-          python-version: |
-            2.7
-            3.9
+          python-version: 3.9
      - name: Setup CI environment
        uses: ./.github/actions/setup-ci
      - name: Setup matrix job artifacts directory
@@ -236,21 +234,17 @@ jobs:
        run: |
          set -exu
          mkdir -p /tmp/artifacts/${{ matrix.job-name }}/
-      - name: Setup python2 test environment
+      - name: Setup python test environment
        run: |
          sudo apt-get install -y libdigest-hmac-perl
-          pip install virtualenv==20.21.0
-          virtualenv -p $(which python2) ~/.virtualenv/py2
-          source ~/.virtualenv/py2/bin/activate
-          pip install 's3cmd==1.6.1'
+          pip install 's3cmd==2.3.0'
      - name: Setup CI services
-        run: docker-compose up -d
+        run: docker compose up -d
        working-directory: .github/docker
      - name: Run file ft tests
        run: |-
          set -o pipefail;
          bash wait_for_local_port.bash 8000 40
-          source ~/.virtualenv/py2/bin/activate
          yarn run ft_test | tee /tmp/artifacts/${{ matrix.job-name }}/tests.log
      - name: Upload logs to artifacts
        uses: scality/action-artifacts@v3
@@ -280,7 +274,7 @@ jobs:
      - name: Setup CI environment
        uses: ./.github/actions/setup-ci
      - name: Setup CI services
-        run: docker-compose up -d
+        run: docker compose up -d
        working-directory: .github/docker
      - name: Run file utapi v2 tests
        run: |-
@@ -318,7 +312,7 @@ jobs:
        run: cp -r ./certs /tmp/ssl-kmip
        working-directory: .github/pykmip
      - name: Setup CI services
-        run: docker-compose --profile pykmip up -d
+        run: docker compose --profile pykmip up -d
        working-directory: .github/docker
      - name: Run file KMIP tests
        run: |-
constants.js
@@ -177,6 +177,16 @@ const constants = {
     assumedRoleArnResourceType: 'assumed-role',
     // Session name of the backbeat lifecycle assumed role session.
     backbeatLifecycleSessionName: 'backbeat-lifecycle',
+    unsupportedSignatureChecksums: new Set([
+        'STREAMING-UNSIGNED-PAYLOAD-TRAILER',
+        'STREAMING-AWS4-HMAC-SHA256-PAYLOAD-TRAILER',
+        'STREAMING-AWS4-ECDSA-P256-SHA256-PAYLOAD',
+        'STREAMING-AWS4-ECDSA-P256-SHA256-PAYLOAD-TRAILER',
+    ]),
+    supportedSignatureChecksums: new Set([
+        'UNSIGNED-PAYLOAD',
+        'STREAMING-AWS4-HMAC-SHA256-PAYLOAD',
+    ]),
 };

 module.exports = constants;
index.js
@@ -1,3 +1,10 @@
 'use strict'; // eslint-disable-line strict

+/**
+ * Catch uncaught exceptions and add timestamp to aid debugging
+ */
+process.on('uncaughtException', err => {
+    process.stderr.write(`${new Date().toISOString()}: Uncaught exception: \n${err.stack}`);
+});
+
 require('./lib/server.js')();
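The handler's effect is easy to see in isolation. A minimal standalone sketch (not part of the diff; note that once an `uncaughtException` listener is registered, Node skips its default crash report, and this script exits simply because its event loop drains):

```js
// sketch: reproduce the timestamped uncaught-exception log added above
process.on('uncaughtException', err => {
    // Same format as the handler above: ISO timestamp, then the stack.
    process.stderr.write(`${new Date().toISOString()}: Uncaught exception: \n${err.stack}`);
});

setTimeout(() => {
    // Thrown outside any try/catch, so it reaches the handler.
    throw new Error('boom');
}, 10);
```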
@@ -289,7 +289,14 @@ function locationConstraintAssert(locationConstraints) {
             'include us-east-1 as a locationConstraint');
 }

-function parseUtapiReindex({ enabled, schedule, sentinel, bucketd }) {
+function parseUtapiReindex(config) {
+    const {
+        enabled,
+        schedule,
+        sentinel,
+        bucketd,
+        onlyCountLatestWhenObjectLocked,
+    } = config;
     assert(typeof enabled === 'boolean',
         'bad config: utapi.reindex.enabled must be a boolean');
     assert(typeof sentinel === 'object',
@@ -304,6 +311,10 @@ function parseUtapiReindex({ enabled, schedule, sentinel, bucketd }) {
         'bad config: utapi.reindex.bucketd.port must be a number');
     assert(typeof schedule === 'string',
         'bad config: utapi.reindex.schedule must be a string');
+    if (onlyCountLatestWhenObjectLocked !== undefined) {
+        assert(typeof onlyCountLatestWhenObjectLocked === 'boolean',
+            'bad config: utapi.reindex.onlyCountLatestWhenObjectLocked must be a boolean');
+    }
     try {
         cronParser.parseExpression(schedule);
     } catch (e) {
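For illustration only, a `utapi.reindex` fragment that would satisfy the new assertions. The values are hypothetical; only the types are dictated by the asserts above, and the `sentinel` field names are an assumption not checked in this hunk:

```js
// Hypothetical utapi.reindex section accepted by parseUtapiReindex.
const reindex = {
    enabled: true,
    schedule: '0 0 * * 6', // must parse as a cron expression
    sentinel: { host: 'localhost', port: 16379, name: 'scality-s3' }, // assumed shape
    bucketd: { host: 'localhost', port: 9000 }, // port must be a number
    // New optional flag; when present it must be a boolean.
    onlyCountLatestWhenObjectLocked: true,
};
```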
@@ -37,8 +37,10 @@ const AMZ_ABORT_ID_HEADER = 'x-amz-abort-rule-id';

 function _generateExpHeadersObjects(rules, params, datetime) {
     const tags = {
-        TagSet: Object.keys(params.tags)
-            .map(key => ({ Key: key, Value: params.tags[key] })),
+        TagSet: params.tags
+            ? Object.keys(params.tags)
+                .map(key => ({ Key: key, Value: params.tags[key] }))
+            : [],
     };

     const objectInfo = { Key: params.key };
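The ternary matters because `Object.keys(undefined)` throws a TypeError. A quick sketch of old versus new behavior for a legacy object whose metadata has no `tags` property (the unit test added later in this diff covers exactly this case):

```js
const params = { key: 'object' }; // legacy object metadata without `tags`

// Old code: Object.keys(params.tags) -> TypeError: Cannot convert
// undefined or null to object, so expiration headers could not be built.

// New code: falls back to an empty TagSet.
const tags = {
    TagSet: params.tags
        ? Object.keys(params.tags).map(key => ({ Key: key, Value: params.tags[key] }))
        : [],
};
console.log(tags.TagSet); // []
```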
@@ -0,0 +1,32 @@
+const { errors } = require('arsenal');
+
+const { unsupportedSignatureChecksums, supportedSignatureChecksums } = require('../../../../constants');
+
+function validateChecksumHeaders(headers) {
+    // If the x-amz-trailer header is present the request is using one of the
+    // trailing checksum algorithms, which are not supported.
+    if (headers['x-amz-trailer'] !== undefined) {
+        return errors.BadRequest.customizeDescription('trailing checksum is not supported');
+    }
+
+    const signatureChecksum = headers['x-amz-content-sha256'];
+    if (signatureChecksum === undefined) {
+        return null;
+    }
+
+    if (supportedSignatureChecksums.has(signatureChecksum)) {
+        return null;
+    }
+
+    // If the value is not one of the possible checksum algorithms
+    // the only other valid value is the actual sha256 checksum of the payload.
+    // Do a simple sanity check of the length to guard against future algos.
+    // If the value is an unknown algo, then it will fail checksum validation.
+    if (!unsupportedSignatureChecksums.has(signatureChecksum) && signatureChecksum.length === 64) {
+        return null;
+    }
+
+    return errors.BadRequest.customizeDescription('unsupported checksum algorithm');
+}
+
+module.exports = validateChecksumHeaders;
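A hedged usage sketch of the new helper (assumes it runs from a repo checkout so the require path resolves; the sha256 value is just any 64-character hex string):

```js
const validateChecksumHeaders = require('./lib/api/apiUtils/object/validateChecksumHeaders');

// Accepted: an actual sha256 of the payload (64 hex chars) -> null
console.log(validateChecksumHeaders({
    'x-amz-content-sha256': 'e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855',
}));

// Rejected: any trailing checksum -> arsenal BadRequest error object
console.log(validateChecksumHeaders({
    'x-amz-trailer': 'x-amz-checksum-sha256',
}));
```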
@@ -360,60 +360,86 @@ function versioningPreprocessing(bucketName, bucketMD, objectKey, objMD,
         });
 }

+/** Return options to pass to Metadata layer for version-specific
+ * operations with the given requested version ID
+ *
+ * @param {object} objectMD - object metadata
+ * @param {boolean} nullVersionCompatMode - if true, behaves in null
+ * version compatibility mode
+ * @return {object} options object with params:
+ * {string} [options.versionId] - specific versionId to update
+ * {boolean} [options.isNull=true|false|undefined] - if set, tells the
+ * Metadata backend if we're updating or deleting a new-style null
+ * version (stored in master or null key), or not a null version.
+ */
+function getVersionSpecificMetadataOptions(objectMD, nullVersionCompatMode) {
+    // Use the internal versionId if it is a "real" null version (not
+    // non-versioned)
+    //
+    // If the target object is non-versioned: do not specify a
+    // "versionId" attribute nor "isNull"
+    //
+    // If the target version is a null version, i.e. has the "isNull"
+    // attribute:
+    //
+    // - send the "isNull=true" param to Metadata if the version is
+    //   already a null key put by a non-compat mode Cloudserver, to
+    //   let Metadata know that the null key is to be updated or
+    //   deleted. This is the case if the "isNull2" metadata attribute
+    //   exists
+    //
+    // - otherwise, do not send the "isNull" parameter to hint
+    //   Metadata that it is a legacy null version
+    //
+    // If the target version is not a null version and is versioned:
+    //
+    // - send the "isNull=false" param to Metadata in non-compat
+    //   mode (mandatory for v1 format)
+    //
+    // - otherwise, do not send the "isNull" parameter to hint
+    //   Metadata that an existing null version may not be stored in a
+    //   null key
+    //
+    if (objectMD.versionId === undefined) {
+        return {};
+    }
+    const options = { versionId: objectMD.versionId };
+    if (objectMD.isNull) {
+        if (objectMD.isNull2) {
+            options.isNull = true;
+        }
+    } else if (!nullVersionCompatMode) {
+        options.isNull = false;
+    }
+    return options;
+}
+
 /** preprocessingVersioningDelete - return versioning information for S3 to
  * manage deletion of objects and versions, including creation of delete markers
  * @param {string} bucketName - name of bucket
  * @param {object} bucketMD - bucket metadata
  * @param {object} objectMD - obj metadata
  * @param {string} [reqVersionId] - specific version ID sent as part of request
- * @param {boolean} nullVersionCompatMode - if true, behaves in null
- * version compatibility mode and return appropriate values:
- * - in normal mode, returns an 'isNull' boolean sent to Metadata (true or false)
- * - in compatibility mode, does not return an 'isNull' property
+ * @param {boolean} nullVersionCompatMode - if true, behaves in null version compatibility mode
  * @return {object} options object with params:
  * {boolean} [options.deleteData=true|undefined] - whether to delete data (if undefined
  * means creating a delete marker instead)
  * {string} [options.versionId] - specific versionId to delete
  * {boolean} [options.isNull=true|false|undefined] - if set, tells the
- * Metadata backend if we're deleting a null version or not a null
- * version. Not set if `nullVersionCompatMode` is true.
+ * Metadata backend if we're deleting a new-style null version (stored
+ * in master or null key), or not a null version.
  */
 function preprocessingVersioningDelete(bucketName, bucketMD, objectMD, reqVersionId, nullVersionCompatMode) {
-    const options = {};
+    let options = {};
+    if (bucketMD.getVersioningConfiguration() && reqVersionId) {
+        options = getVersionSpecificMetadataOptions(objectMD, nullVersionCompatMode);
+    }
     if (!bucketMD.getVersioningConfiguration() || reqVersionId) {
         // delete data if bucket is non-versioned or the request
         // deletes a specific version
         options.deleteData = true;
     }
-    if (bucketMD.getVersioningConfiguration() && reqVersionId) {
-        if (reqVersionId === 'null') {
-            // deleting the 'null' version if it exists:
-            //
-            // - use its internal versionId if it is a "real" null
-            //   version (not non-versioned)
-            //
-            // - send the "isNull" param to Metadata if:
-            //
-            //   - in non-compat mode (mandatory for v1 format)
-            //
-            //   - OR if the version is already a null key put by a
-            //     non-compat mode Cloudserver, to let Metadata know that
-            //     the null key is to be deleted. This is the case if the
-            //     "isNull2" param is set.
-            if (objectMD.versionId !== undefined) {
-                options.versionId = objectMD.versionId;
-                if (objectMD.isNull2) {
-                    options.isNull = true;
-                }
-            }
-        } else {
-            // deleting a specific version
-            options.versionId = reqVersionId;
-            if (!nullVersionCompatMode) {
-                options.isNull = false;
-            }
-        }
-    }
     return options;
 }
@@ -424,5 +450,6 @@ module.exports = {
     processVersioningState,
     getMasterState,
     versioningPreprocessing,
+    getVersionSpecificMetadataOptions,
     preprocessingVersioningDelete,
 };
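To make the refactor concrete: each call site below replaces the same hand-rolled params block with this exported helper. A sketch of its return values in non-compat mode, mirroring the expectations of the unit tests added further down (require path assumes a repo checkout):

```js
const { getVersionSpecificMetadataOptions } =
    require('./lib/api/apiUtils/object/versioning');

getVersionSpecificMetadataOptions({}, false);
// -> {} (object put before versioning was first enabled)

getVersionSpecificMetadataOptions({ versionId: 'v1' }, false);
// -> { versionId: 'v1', isNull: false } (regular version)

getVersionSpecificMetadataOptions({ versionId: 'vnull', isNull: true }, false);
// -> { versionId: 'vnull' } (legacy null version, no isNull hint)

getVersionSpecificMetadataOptions({ versionId: 'vnull', isNull: true, isNull2: true }, false);
// -> { versionId: 'vnull', isNull: true } (new-style null key)
```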
@@ -1,7 +1,7 @@
 const async = require('async');
 const { errors } = require('arsenal');

-const { decodeVersionId, getVersionIdResHeader }
+const { decodeVersionId, getVersionIdResHeader, getVersionSpecificMetadataOptions }
     = require('./apiUtils/object/versioning');

 const { metadataValidateBucketAndObj } = require('../metadata/metadataUtils');
@@ -75,13 +75,7 @@ function objectDeleteTagging(authInfo, request, log, callback) {
         (bucket, objectMD, next) => {
             // eslint-disable-next-line no-param-reassign
             objectMD.tags = {};
-            const params = {};
-            if (objectMD.versionId) {
-                params.versionId = objectMD.versionId;
-                if (!config.nullVersionCompatMode) {
-                    params.isNull = objectMD.isNull || false;
-                }
-            }
+            const params = getVersionSpecificMetadataOptions(objectMD, config.nullVersionCompatMode);
             const replicationInfo = getReplicationInfo(objectKey, bucket, true,
                 0, REPLICATION_ACTION, objectMD);
             if (replicationInfo) {
@@ -15,6 +15,8 @@ const kms = require('../kms/wrapper');
 const { config } = require('../Config');
 const { setExpirationHeaders } = require('./apiUtils/object/expirationHeaders');
 const monitoring = require('../utilities/metrics');
+const validateChecksumHeaders = require('./apiUtils/object/validateChecksumHeaders');
+
 const writeContinue = require('../utilities/writeContinue');
 const versionIdUtils = versioning.VersionID;
@@ -69,6 +71,11 @@ function objectPut(authInfo, request, streamingV4Params, log, callback) {
         ));
     }

+    const checksumHeaderErr = validateChecksumHeaders(headers);
+    if (checksumHeaderErr) {
+        return callback(checksumHeaderErr);
+    }
+
     log.trace('owner canonicalID to send to data', { canonicalID });

     return metadataValidateBucketAndObj(valParams, log,
@@ -7,7 +7,7 @@ const { pushMetric } = require('../utapi/utilities');
 const collectCorsHeaders = require('../utilities/collectCorsHeaders');
 const constants = require('../../constants');
 const vault = require('../auth/vault');
-const { decodeVersionId, getVersionIdResHeader }
+const { decodeVersionId, getVersionIdResHeader, getVersionSpecificMetadataOptions }
     = require('./apiUtils/object/versioning');
 const { metadataValidateBucketAndObj } = require('../metadata/metadataUtils');
 const monitoring = require('../utilities/metrics');
@@ -281,13 +281,7 @@ function objectPutACL(authInfo, request, log, cb) {
         },
         function addAclsToObjMD(bucket, objectMD, ACLParams, next) {
             // Add acl's to object metadata
-            const params = {};
-            if (objectMD.versionId) {
-                params.versionId = objectMD.versionId;
-                if (!config.nullVersionCompatMode) {
-                    params.isNull = objectMD.isNull || false;
-                }
-            }
+            const params = getVersionSpecificMetadataOptions(objectMD, config.nullVersionCompatMode);
             acl.addObjectACL(bucket, objectKey, objectMD,
                 ACLParams, params, log, err => next(err, bucket, objectMD));
         },
@@ -2,7 +2,7 @@ const async = require('async');
 const { errors, s3middleware } = require('arsenal');

 const collectCorsHeaders = require('../utilities/collectCorsHeaders');
-const { decodeVersionId, getVersionIdResHeader } =
+const { decodeVersionId, getVersionIdResHeader, getVersionSpecificMetadataOptions } =
     require('./apiUtils/object/versioning');
 const getReplicationInfo = require('./apiUtils/object/getReplicationInfo');
 const metadata = require('../metadata/wrapper');
@@ -86,13 +86,7 @@ function objectPutLegalHold(authInfo, request, log, callback) {
         (bucket, legalHold, objectMD, next) => {
             // eslint-disable-next-line no-param-reassign
             objectMD.legalHold = legalHold;
-            const params = {};
-            if (objectMD.versionId) {
-                params.versionId = objectMD.versionId;
-                if (!config.nullVersionCompatMode) {
-                    params.isNull = objectMD.isNull || false;
-                }
-            }
+            const params = getVersionSpecificMetadataOptions(objectMD, config.nullVersionCompatMode);
             const replicationInfo = getReplicationInfo(objectKey, bucket, true,
                 0, REPLICATION_ACTION, objectMD);
             if (replicationInfo) {
@@ -19,6 +19,8 @@ const locationConstraintCheck
 const monitoring = require('../utilities/metrics');
 const writeContinue = require('../utilities/writeContinue');
 const { getObjectSSEConfiguration } = require('./apiUtils/bucket/bucketEncryption');
+const validateChecksumHeaders = require('./apiUtils/object/validateChecksumHeaders');
+
 const skipError = new Error('skip');

 // We pad the partNumbers so that the parts will be sorted in numerical order.
@@ -64,6 +66,11 @@ function objectPutPart(authInfo, request, streamingV4Params, log,
         return cb(errors.EntityTooLarge);
     }

+    const checksumHeaderErr = validateChecksumHeaders(request.headers);
+    if (checksumHeaderErr) {
+        return cb(checksumHeaderErr);
+    }
+
     // Note: Part sizes cannot be less than 5MB in size except for the last.
     // However, we do not check this value here because we cannot know which
     // part will be the last until a complete MPU request is made. Thus, we let
@@ -1,7 +1,7 @@
 const async = require('async');
 const { errors, s3middleware } = require('arsenal');

-const { decodeVersionId, getVersionIdResHeader } =
+const { decodeVersionId, getVersionIdResHeader, getVersionSpecificMetadataOptions } =
     require('./apiUtils/object/versioning');
 const { ObjectLockInfo, checkUserGovernanceBypass, hasGovernanceBypassHeader } =
     require('./apiUtils/object/objectLockHelpers');
@@ -116,13 +116,7 @@ function objectPutRetention(authInfo, request, log, callback) {
             /* eslint-disable no-param-reassign */
             objectMD.retentionMode = retentionInfo.mode;
             objectMD.retentionDate = retentionInfo.date;
-            const params = {};
-            if (objectMD.versionId) {
-                params.versionId = objectMD.versionId;
-                if (!config.nullVersionCompatMode) {
-                    params.isNull = objectMD.isNull || false;
-                }
-            }
+            const params = getVersionSpecificMetadataOptions(objectMD, config.nullVersionCompatMode);
             const replicationInfo = getReplicationInfo(objectKey, bucket, true,
                 0, REPLICATION_ACTION, objectMD);
             if (replicationInfo) {
@@ -1,7 +1,7 @@
 const async = require('async');
 const { errors, s3middleware } = require('arsenal');

-const { decodeVersionId, getVersionIdResHeader } =
+const { decodeVersionId, getVersionIdResHeader, getVersionSpecificMetadataOptions } =
     require('./apiUtils/object/versioning');

 const { metadataValidateBucketAndObj } = require('../metadata/metadataUtils');
@@ -81,13 +81,7 @@ function objectPutTagging(authInfo, request, log, callback) {
         (bucket, tags, objectMD, next) => {
             // eslint-disable-next-line no-param-reassign
             objectMD.tags = tags;
-            const params = {};
-            if (objectMD.versionId) {
-                params.versionId = objectMD.versionId;
-                if (!config.nullVersionCompatMode) {
-                    params.isNull = objectMD.isNull || false;
-                }
-            }
+            const params = getVersionSpecificMetadataOptions(objectMD, config.nullVersionCompatMode);
             const replicationInfo = getReplicationInfo(objectKey, bucket, true,
                 0, REPLICATION_ACTION, objectMD);
             if (replicationInfo) {
@@ -1,6 +1,6 @@
 {
   "name": "s3",
-  "version": "7.70.21",
+  "version": "7.70.21-6",
   "description": "S3 connector",
   "main": "index.js",
   "engines": {
@@ -20,7 +20,7 @@
   "homepage": "https://github.com/scality/S3#readme",
   "dependencies": {
     "@hapi/joi": "^17.1.0",
-    "arsenal": "git+https://github.com/scality/arsenal#7.70.4",
+    "arsenal": "git+https://github.com/scality/arsenal#7.70.4-1",
     "async": "~2.5.0",
     "aws-sdk": "2.905.0",
     "azure-storage": "^2.1.0",
@@ -35,7 +35,7 @@
     "moment": "^2.26.0",
     "npm-run-all": "~4.1.5",
     "prom-client": "14.2.0",
-    "utapi": "git+https://github.com/scality/utapi#7.10.12",
+    "utapi": "git+https://github.com/scality/utapi#7.70.3",
     "utf8": "~2.1.1",
     "uuid": "^3.0.1",
     "vaultclient": "scality/vaultclient#7.10.13",
@@ -30,6 +30,33 @@ function getPolicyParams(paramToChange) {
     };
 }

+function getPolicyParamsWithId(paramToChange, policyId) {
+    const newParam = {};
+    const bucketPolicy = {
+        Version: '2012-10-17',
+        Id: policyId,
+        Statement: [basicStatement],
+    };
+    if (paramToChange) {
+        newParam[paramToChange.key] = paramToChange.value;
+        bucketPolicy.Statement[0] = Object.assign({}, basicStatement, newParam);
+    }
+    return {
+        Bucket: bucket,
+        Policy: JSON.stringify(bucketPolicy),
+    };
+}
+
+function generateRandomString(length) {
+    // All allowed characters matching the regex in arsenal
+    const allowedCharacters = 'abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789+=,.@ -/';
+    const allowedCharactersLength = allowedCharacters.length;
+
+    return [...Array(length)]
+        .map(() => allowedCharacters[~~(Math.random() * allowedCharactersLength)])
+        .join('');
+}
+
 // Check for the expected error response code and status code.
 function assertError(err, expectedErr, cb) {
     if (expectedErr === null) {
@@ -102,5 +129,31 @@ describe('aws-sdk test put bucket policy', () => {
             s3.putBucketPolicy(params, err =>
                 assertError(err, 'MalformedPolicy', done));
         });
+
+        it('should return MalformedPolicy because Id is not a string',
+            done => {
+                const params = getPolicyParamsWithId(null, 59);
+                s3.putBucketPolicy(params, err =>
+                    assertError(err, 'MalformedPolicy', done));
+            });
+
+        it('should put a bucket policy on bucket since Id is a string',
+            done => {
+                const params = getPolicyParamsWithId(null, 'cd3ad3d9-2776-4ef1-a904-4c229d1642e');
+                s3.putBucketPolicy(params, err =>
+                    assertError(err, null, done));
+            });
+
+        it('should allow bucket policy with principal arn less than 2048 characters', done => {
+            const params = getPolicyParams({ key: 'Principal', value: { AWS: `arn:aws:iam::767707094035:user/${generateRandomString(150)}` } }); // eslint-disable-line max-len
+            s3.putBucketPolicy(params, err =>
+                assertError(err, null, done));
+        });
+
+        it('should not allow bucket policy with principal arn more than 2048 characters', done => {
+            const params = getPolicyParams({ key: 'Principal', value: { AWS: `arn:aws:iam::767707094035:user/${generateRandomString(2020)}` } }); // eslint-disable-line max-len
+            s3.putBucketPolicy(params, err =>
+                assertError(err, 'MalformedPolicy', done));
+        });
     });
 });
@@ -0,0 +1,156 @@
+const assert = require('assert');
+const async = require('async');
+
+const BucketUtility = require('../../lib/utility/bucket-util');
+
+const {
+    removeAllVersions,
+    versioningEnabled,
+} = require('../../lib/utility/versioning-util.js');
+
+// This series of tests can only be enabled on an environment that has
+// two Cloudserver instances, with one of them in null version
+// compatibility mode. This is why they have to be explicitly enabled,
+// which is done in a particular Integration test suite. This test
+// suite makes the most sense in Integration because it tests the
+// combination of Cloudserver requests to bucketd and the behavior of
+// bucketd based on those requests.
+
+const describeSkipIfNotExplicitlyEnabled =
+    process.env.ENABLE_LEGACY_NULL_VERSION_COMPAT_TESTS ? describe : describe.skip;
+
+describeSkipIfNotExplicitlyEnabled('legacy null version compatibility tests', () => {
+    const bucketUtilCompat = new BucketUtility('default', {
+        endpoint: 'http://127.0.0.1:8001',
+    });
+    const s3Compat = bucketUtilCompat.s3;
+    const bucketUtil = new BucketUtility('default', {});
+    const s3 = bucketUtil.s3;
+    const bucket = `legacy-null-version-compat-${Date.now()}`;
+
+    // In this series of tests, we first create a non-current null
+    // version in legacy format (with "nullVersionId" field in the
+    // master and no "isNull2" metadata attribute), by using the
+    // Cloudserver endpoint that is configured with null version
+    // compatibility mode enabled.
+    beforeEach(done => async.series([
+        next => s3Compat.createBucket({
+            Bucket: bucket,
+        }, next),
+        next => s3Compat.putObject({
+            Bucket: bucket,
+            Key: 'obj',
+            Body: 'nullbody',
+        }, next),
+        next => s3Compat.putBucketVersioning({
+            Bucket: bucket,
+            VersioningConfiguration: versioningEnabled,
+        }, next),
+        next => s3Compat.putObject({
+            Bucket: bucket,
+            Key: 'obj',
+            Body: 'versionedbody',
+        }, next),
+    ], done));
+
+    afterEach(done => {
+        removeAllVersions({ Bucket: bucket }, err => {
+            if (err) {
+                return done(err);
+            }
+            return s3Compat.deleteBucket({ Bucket: bucket }, done);
+        });
+    });
+
+    it('updating ACL of legacy null version with non-compat cloudserver', done => {
+        async.series([
+            next => s3.putObjectAcl({
+                Bucket: bucket,
+                Key: 'obj',
+                VersionId: 'null',
+                ACL: 'public-read',
+            }, next),
+            next => s3.getObjectAcl({
+                Bucket: bucket,
+                Key: 'obj',
+                VersionId: 'null',
+            }, (err, acl) => {
+                assert.ifError(err);
+                // check that we fetched the updated null version
+                assert.strictEqual(acl.Grants.length, 2);
+                next();
+            }),
+            next => s3.deleteObject({
+                Bucket: bucket,
+                Key: 'obj',
+                VersionId: 'null',
+            }, next),
+            next => s3.listObjectVersions({
+                Bucket: bucket,
+            }, (err, listing) => {
+                assert.ifError(err);
+                // check that the null version has been correctly deleted
+                assert(listing.Versions.every(version => version.VersionId !== 'null'));
+                next();
+            }),
+        ], done);
+    });
+
+    it('updating tags of legacy null version with non-compat cloudserver', done => {
+        const tagSet = [
+            {
+                Key: 'newtag',
+                Value: 'newtagvalue',
+            },
+        ];
+        async.series([
+            next => s3.putObjectTagging({
+                Bucket: bucket,
+                Key: 'obj',
+                VersionId: 'null',
+                Tagging: {
+                    TagSet: tagSet,
+                },
+            }, next),
+            next => s3.getObjectTagging({
+                Bucket: bucket,
+                Key: 'obj',
+                VersionId: 'null',
+            }, (err, tagging) => {
+                assert.ifError(err);
+                assert.deepStrictEqual(tagging.TagSet, tagSet);
+                next();
+            }),
+            next => s3.deleteObjectTagging({
+                Bucket: bucket,
+                Key: 'obj',
+                VersionId: 'null',
+            }, err => {
+                assert.ifError(err);
+                next();
+            }),
+            next => s3.getObjectTagging({
+                Bucket: bucket,
+                Key: 'obj',
+                VersionId: 'null',
+            }, (err, tagging) => {
+                assert.ifError(err);
+                assert.deepStrictEqual(tagging.TagSet, []);
+                next();
+            }),
+            next => s3.deleteObject({
+                Bucket: bucket,
+                Key: 'obj',
+                VersionId: 'null',
+            }, next),
+            next => s3.listObjectVersions({
+                Bucket: bucket,
+            }, (err, listing) => {
+                assert.ifError(err);
+                // check that the null version has been correctly deleted
+                assert(listing.Versions.every(version => version.VersionId !== 'null'));
+                next();
+            }),
+        ], done);
+    });
+});
@@ -0,0 +1,70 @@
+const assert = require('assert');
+const { makeS3Request } = require('../utils/makeRequest');
+const HttpRequestAuthV4 = require('../utils/HttpRequestAuthV4');
+
+const bucket = 'testunsupportedchecksumsbucket';
+const objectKey = 'key';
+const objData = Buffer.alloc(1024, 'a');
+
+const authCredentials = {
+    accessKey: 'accessKey1',
+    secretKey: 'verySecretKey1',
+};
+
+const itSkipIfAWS = process.env.AWS_ON_AIR ? it.skip : it;
+
+describe('unsupported checksum requests:', () => {
+    before(done => {
+        makeS3Request({
+            method: 'PUT',
+            authCredentials,
+            bucket,
+        }, err => {
+            assert.ifError(err);
+            done();
+        });
+    });
+
+    after(done => {
+        makeS3Request({
+            method: 'DELETE',
+            authCredentials,
+            bucket,
+        }, err => {
+            assert.ifError(err);
+            done();
+        });
+    });
+
+    itSkipIfAWS('should respond with BadRequest for trailing checksum', done => {
+        const req = new HttpRequestAuthV4(
+            `http://localhost:8000/${bucket}/${objectKey}`,
+            Object.assign(
+                {
+                    method: 'PUT',
+                    headers: {
+                        'content-length': objData.length,
+                        'x-amz-content-sha256': 'STREAMING-AWS4-HMAC-SHA256-PAYLOAD-TRAILER',
+                        'x-amz-trailer': 'x-amz-checksum-sha256',
+                    },
+                },
+                authCredentials
+            ),
+            res => {
+                assert.strictEqual(res.statusCode, 400);
+                res.on('data', () => {});
+                res.on('end', done);
+            }
+        );
+
+        req.on('error', err => {
+            assert.ifError(err);
+        });
+
+        req.write(objData);
+
+        req.once('drain', () => {
+            req.end();
+        });
+    });
+});
@@ -165,7 +165,9 @@ function readJsonFromChild(child, lineFinder, cb) {
         const findBrace = data.indexOf('{', findLine);
         const findEnd = findEndJson(data, findBrace);
         const endJson = data.substring(findBrace, findEnd + 1)
-            .replace(/"/g, '\\"').replace(/'/g, '"');
+            .replace(/"/g, '\\"').replace(/'/g, '"')
+            .replace(/b'/g, '\'')
+            .replace(/b"/g, '"');
         return cb(JSON.parse(endJson));
     });
 }
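The two extra replaces handle s3cmd 2.x running on Python 3, whose debug output renders byte strings with a `b` prefix that is not valid JSON. A sketch of the transformation on an illustrative line (the sample string is made up):

```js
// Illustrative s3cmd 2.x debug line with Python 3 b'' byte-string prefixes.
const sample = "{'headers': {b'content-type': b'application/xml', 'date': 'Tue, 01 Aug 2023 00:00:00 GMT'}}";

// Same replace chain as in readJsonFromChild above: quote the double
// quotes, turn single quotes into JSON double quotes, then strip the
// leftover b prefixes.
const endJson = sample
    .replace(/"/g, '\\"').replace(/'/g, '"')
    .replace(/b'/g, '\'')
    .replace(/b"/g, '"');

console.log(JSON.parse(endJson).headers['content-type']); // 'application/xml'
```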
@@ -344,18 +346,18 @@ describe('s3cmd getService', () => {

     it("should have response headers matching AWS's response headers",
         done => {
-            provideLineOfInterest(['ls', '--debug'], 'DEBUG: Response: {',
+            provideLineOfInterest(['ls', '--debug'], '\'headers\': {',
                 parsedObject => {
-                    assert(parsedObject.headers['x-amz-id-2']);
-                    assert(parsedObject.headers['transfer-encoding']);
-                    assert(parsedObject.headers['x-amz-request-id']);
-                    const gmtDate = new Date(parsedObject.headers.date)
+                    assert(parsedObject['x-amz-id-2']);
+                    assert(parsedObject['transfer-encoding']);
+                    assert(parsedObject['x-amz-request-id']);
+                    const gmtDate = new Date(parsedObject.date)
                         .toUTCString();
-                    assert.strictEqual(parsedObject.headers.date, gmtDate);
+                    assert.strictEqual(parsedObject.date, gmtDate);
                     assert.strictEqual(parsedObject
-                        .headers['content-type'], 'application/xml');
+                        ['content-type'], 'application/xml');
                     assert.strictEqual(parsedObject
-                        .headers['set-cookie'], undefined);
+                        ['set-cookie'], undefined);
                     done();
                 });
         });
@@ -395,11 +397,11 @@ describe('s3cmd getObject', function toto() {
     });

     it('get non existing file in existing bucket, should fail', done => {
-        exec(['get', `s3://${bucket}/${nonexist}`, 'fail'], done, 12);
+        exec(['get', `s3://${bucket}/${nonexist}`, 'fail'], done, 64);
     });

     it('get file in non existing bucket, should fail', done => {
-        exec(['get', `s3://${nonexist}/${nonexist}`, 'fail2'], done, 12);
+        exec(['get', `s3://${nonexist}/${nonexist}`, 'fail2'], done, 64);
     });
 });
@@ -511,7 +513,7 @@ describe('s3cmd delObject', () => {

     it('delete an already deleted object, should return a 204', done => {
         provideLineOfInterest(['rm', `s3://${bucket}/${upload}`, '--debug'],
-            'DEBUG: Response: {', parsedObject => {
+            'DEBUG: Response:\n{', parsedObject => {
                 assert.strictEqual(parsedObject.status, 204);
                 done();
             });
@@ -519,14 +521,14 @@ describe('s3cmd delObject', () => {

     it('delete non-existing object, should return a 204', done => {
         provideLineOfInterest(['rm', `s3://${bucket}/${nonexist}`, '--debug'],
-            'DEBUG: Response: {', parsedObject => {
+            'DEBUG: Response:\n{', parsedObject => {
                 assert.strictEqual(parsedObject.status, 204);
                 done();
             });
     });

     it('try to get the deleted object, should fail', done => {
-        exec(['get', `s3://${bucket}/${upload}`, download], done, 12);
+        exec(['get', `s3://${bucket}/${upload}`, download], done, 64);
     });
 });
@@ -621,7 +623,7 @@ describe('s3cmd multipart upload', function titi() {
     });

     it('should not be able to get deleted object', done => {
-        exec(['get', `s3://${bucket}/${MPUpload}`, download], done, 12);
+        exec(['get', `s3://${bucket}/${MPUpload}`, download], done, 64);
     });
 });
@@ -660,7 +662,7 @@ MPUploadSplitter.forEach(file => {
     });

     it('should not be able to get deleted object', done => {
-        exec(['get', `s3://${bucket}/${file}`, download], done, 12);
+        exec(['get', `s3://${bucket}/${file}`, download], done, 64);
     });
 });
});
@@ -728,7 +730,7 @@ describe('s3cmd info', () => {

     // test that POLICY and CORS are returned as 'none'
     it('should find that policy has a value of none', done => {
-        checkRawOutput(['info', `s3://${bucket}`], 'policy', 'none',
+        checkRawOutput(['info', `s3://${bucket}`], 'Policy', 'none',
             'stdout', foundIt => {
                 assert(foundIt);
                 done();
@@ -736,7 +738,7 @@ describe('s3cmd info', () => {
     });

     it('should find that cors has a value of none', done => {
-        checkRawOutput(['info', `s3://${bucket}`], 'cors', 'none',
+        checkRawOutput(['info', `s3://${bucket}`], 'CORS', 'none',
             'stdout', foundIt => {
                 assert(foundIt);
                 done();
@@ -762,7 +764,7 @@ describe('s3cmd info', () => {
     });

     it('should find that cors has a value', done => {
-        checkRawOutput(['info', `s3://${bucket}`], 'cors', corsConfig,
+        checkRawOutput(['info', `s3://${bucket}`], 'CORS', corsConfig,
             'stdout', foundIt => {
                 assert(foundIt, 'Did not find value for cors');
                 done();
@@ -103,6 +103,16 @@ describe('generateExpirationHeaders', () => {
             },
             {},
         ],
+        [
+            'should provide correct headers for compatibility with legacy objects missing the tags property',
+            {
+                lifecycleConfig: lifecycleExpirationDays,
+                objectParams: { key: 'object', date: objectDate },
+            },
+            {
+                'x-amz-expiration': `expiry-date="${expectedDaysExpiryDate}", rule-id="test-days"`,
+            },
+        ],
         [
             'should return correct headers for object (days)',
             {
@@ -0,0 +1,75 @@
+const assert = require('assert');
+
+const validateChecksumHeaders = require('../../../../lib/api/apiUtils/object/validateChecksumHeaders');
+const { unsupportedSignatureChecksums, supportedSignatureChecksums } = require('../../../../constants');
+
+const passingCases = [
+    {
+        description: 'should return null if no checksum headers are present',
+        headers: {},
+    },
+    {
+        description: 'should return null if UNSIGNED-PAYLOAD is used',
+        headers: {
+            'x-amz-content-sha256': 'UNSIGNED-PAYLOAD',
+        },
+    },
+    {
+        description: 'should return null if a sha256 checksum is used',
+        headers: {
+            'x-amz-content-sha256': 'thisIs64CharactersLongAndThatsAllWeCheckFor1234567890abcdefghijk',
+        },
+    },
+];
+
+supportedSignatureChecksums.forEach(checksum => {
+    passingCases.push({
+        description: `should return null if ${checksum} is used`,
+        headers: {
+            'x-amz-content-sha256': checksum,
+        },
+    });
+});
+
+const failingCases = [
+    {
+        description: 'should return BadRequest if a trailing checksum is used',
+        headers: {
+            'x-amz-trailer': 'test',
+        },
+    },
+    {
+        description: 'should return BadRequest if an unknown algo is used',
+        headers: {
+            'x-amz-content-sha256': 'UNSUPPORTED-CHECKSUM',
+        },
+    },
+];
+
+unsupportedSignatureChecksums.forEach(checksum => {
+    failingCases.push({
+        description: `should return BadRequest if ${checksum} is used`,
+        headers: {
+            'x-amz-content-sha256': checksum,
+        },
+    });
+});
+
+describe('validateChecksumHeaders', () => {
+    passingCases.forEach(testCase => {
+        it(testCase.description, () => {
+            const result = validateChecksumHeaders(testCase.headers);
+            assert.ifError(result);
+        });
+    });
+
+    failingCases.forEach(testCase => {
+        it(testCase.description, () => {
+            const result = validateChecksumHeaders(testCase.headers);
+            assert(result instanceof Error, 'Expected an error to be returned');
+            assert.strictEqual(result.is.BadRequest, true);
+            assert.strictEqual(result.code, 400);
+        });
+    });
+});
@@ -5,6 +5,7 @@ const { config } = require('../../../../lib/Config');
 const INF_VID = versioning.VersionID.getInfVid(config.replicationGroupId);

 const { processVersioningState, getMasterState,
+    getVersionSpecificMetadataOptions,
     preprocessingVersioningDelete } =
     require('../../../../lib/api/apiUtils/object/versioning');

@@ -527,6 +528,68 @@ describe('versioning helpers', () => {
             }))));
     });

+    describe('getVersionSpecificMetadataOptions', () => {
+        [
+            {
+                description: 'object put before versioning was first enabled',
+                objMD: {},
+                expectedRes: {},
+                expectedResCompat: {},
+            },
+            {
+                description: 'non-null object version',
+                objMD: {
+                    versionId: 'v1',
+                },
+                expectedRes: {
+                    versionId: 'v1',
+                    isNull: false,
+                },
+                expectedResCompat: {
+                    versionId: 'v1',
+                },
+            },
+            {
+                description: 'legacy null object version',
+                objMD: {
+                    versionId: 'vnull',
+                    isNull: true,
+                },
+                expectedRes: {
+                    versionId: 'vnull',
+                },
+                expectedResCompat: {
+                    versionId: 'vnull',
+                },
+            },
+            {
+                description: 'null object version in null key',
+                objMD: {
+                    versionId: 'vnull',
+                    isNull: true,
+                    isNull2: true,
+                },
+                expectedRes: {
+                    versionId: 'vnull',
+                    isNull: true,
+                },
+                expectedResCompat: {
+                    versionId: 'vnull',
+                    isNull: true,
+                },
+            },
+        ].forEach(testCase =>
+            [false, true].forEach(nullVersionCompatMode =>
+                it(`${testCase.description}${nullVersionCompatMode ? ' (null compat)' : ''}`,
+                () => {
+                    const options = getVersionSpecificMetadataOptions(
+                        testCase.objMD, nullVersionCompatMode);
+                    const expectedResAttr = nullVersionCompatMode ?
+                        'expectedResCompat' : 'expectedRes';
+                    assert.deepStrictEqual(options, testCase[expectedResAttr]);
+                })));
+    });
+
     describe('preprocessingVersioningDelete', () => {
         [
             {
yarn.lock
@@ -426,9 +426,9 @@ arraybuffer.slice@~0.0.7:
   optionalDependencies:
     ioctl "^2.0.2"

-"arsenal@git+https://github.com/scality/arsenal#7.70.4":
-  version "7.70.4"
-  resolved "git+https://github.com/scality/arsenal#c4cc5a2c3dfa4a8d6d565c4029ec05cbb0bf1a3e"
+"arsenal@git+https://github.com/scality/arsenal#7.70.4-1":
+  version "7.70.4-1"
+  resolved "git+https://github.com/scality/arsenal#09a474d3eae9db23bcfed760fa70aafd961a2ce7"
   dependencies:
     "@types/async" "^3.2.12"
     "@types/utf8" "^3.0.1"
@@ -5161,9 +5161,9 @@ user-home@^2.0.0:
   dependencies:
     os-homedir "^1.0.0"

-"utapi@git+https://github.com/scality/utapi#7.10.12":
-  version "7.10.12"
-  resolved "git+https://github.com/scality/utapi#347cf3c1cb088bc14bea082227100f93d1b11597"
+"utapi@git+https://github.com/scality/utapi#7.70.3":
+  version "7.70.3"
+  resolved "git+https://github.com/scality/utapi#e8882a28cc888b2a96479f0301a16f45ce2b0603"
   dependencies:
     "@hapi/joi" "^17.1.1"
     "@senx/warp10" "^1.0.14"