Compare commits
14 Commits
developmen ... hotfix/7.1

Author | SHA1 | Date
---|---|---
Taylor McKinnon | 0618da71e1 |
Francois Ferrand | 9d41897ce8 |
Francois Ferrand | 85b17453c4 |
Francois Ferrand | 6b72aaf2e2 |
Francois Ferrand | f87f8750e9 |
tmacro | a6e6e2f4eb |
Taylor McKinnon | a11e1ad3bb |
Taylor McKinnon | fb6a4db2fc |
Taylor McKinnon | 8cb82a2f56 |
Nicolas Humbert | 1779c7a77d |
Nicolas Humbert | 4cb1e1f617 |
Maha Benzekri | 3e97e8f38a |
Maha Benzekri | 7fffd38814 |
Maha Benzekri | 9da4e45f11 |

@@ -16,17 +16,10 @@ runs:
run: |-
set -exu;
mkdir -p /tmp/artifacts/${{ github.job }}/;
- uses: actions/setup-node@v2
- uses: actions/setup-node@v4
with:
node-version: '16'
cache: 'yarn'
- name: install dependencies
shell: bash
run: yarn install --ignore-engines --frozen-lockfile --network-concurrency 1
- uses: actions/cache@v2
with:
path: ~/.cache/pip
key: ${{ runner.os }}-pip
- name: Install python deps
shell: bash
run: pip install docker-compose
@@ -34,4 +34,4 @@ gcpbackendmismatch_GCP_SERVICE_KEY
gcpbackend_GCP_SERVICE_KEYFILE
gcpbackendmismatch_GCP_SERVICE_KEYFILE
gcpbackendnoproxy_GCP_SERVICE_KEYFILE
gcpbackendproxy_GCP_SERVICE_KEYFILE
gcpbackendproxy_GCP_SERVICE_KEYFILE
@@ -61,6 +61,6 @@ services:
pykmip:
network_mode: "host"
profiles: ['pykmip']
image: registry.scality.com/cloudserver-dev/pykmip
image: ${PYKMIP_IMAGE:-ghcr.io/scality/cloudserver/pykmip}
volumes:
- /tmp/artifacts/${JOB_NAME}:/artifacts
@@ -14,12 +14,12 @@ jobs:
runs-on: ubuntu-latest
steps:
- name: Checkout code
uses: actions/checkout@v3
uses: actions/checkout@v4

- name: Initialize CodeQL
uses: github/codeql-action/init@v2
uses: github/codeql-action/init@v3
with:
languages: javascript, python, ruby

- name: Build and analyze
uses: github/codeql-action/analyze@v2
uses: github/codeql-action/analyze@v3
@@ -10,7 +10,7 @@ jobs:
runs-on: ubuntu-latest
steps:
- name: 'Checkout Repository'
uses: actions/checkout@v3
uses: actions/checkout@v4

- name: 'Dependency Review'
uses: actions/dependency-review-action@v3
uses: actions/dependency-review-action@v4
@@ -10,36 +10,59 @@ on:

jobs:
build-federation-image:
uses: scality/workflows/.github/workflows/docker-build.yaml@v1
secrets: inherit
with:
push: true
registry: registry.scality.com
namespace: ${{ github.event.repository.name }}
name: ${{ github.event.repository.name }}
context: .
file: images/svc-base/Dockerfile
tag: ${{ github.event.inputs.tag }}-svc-base
runs-on: ubuntu-20.04
steps:
- name: Checkout
uses: actions/checkout@v4
- name: Set up Docker Buildx
uses: docker/setup-buildx-action@v3
- name: Login to GitHub Registry
uses: docker/login-action@v3
with:
registry: ghcr.io
username: ${{ github.repository_owner }}
password: ${{ github.token }}
- name: Build and push image for federation
uses: docker/build-push-action@v5
with:
push: true
context: .
file: images/svc-base/Dockerfile
tags: |
ghcr.io/${{ github.repository }}:${{ github.event.inputs.tag }}-svc-base
cache-from: type=gha,scope=federation
cache-to: type=gha,mode=max,scope=federation

build-image:
uses: scality/workflows/.github/workflows/docker-build.yaml@v1
secrets: inherit
with:
push: true
registry: registry.scality.com
namespace: ${{ github.event.repository.name }}
name: ${{ github.event.repository.name }}
context: .
file: Dockerfile
tag: ${{ github.event.inputs.tag }}
runs-on: ubuntu-20.04
steps:
- name: Checkout
uses: actions/checkout@v4
- name: Set up Docker Buildx
uses: docker/setup-buildx-action@v3
- name: Login to GitHub Registry
uses: docker/login-action@v3
with:
registry: ghcr.io
username: ${{ github.repository_owner }}
password: ${{ github.token }}
- name: Build and push image
uses: docker/build-push-action@v5
with:
push: true
context: .
tags: |
ghcr.io/${{ github.repository }}:${{ github.event.inputs.tag }}
cache-from: type=gha
cache-to: type=gha,mode=max

github-release:
runs-on: ubuntu-latest
steps:
- name: Create Release
uses: softprops/action-gh-release@v1
uses: softprops/action-gh-release@v2
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
GITHUB_TOKEN: ${{ github.token }}
with:
name: Release ${{ github.event.inputs.tag }}
tag_name: ${{ github.event.inputs.tag }}
@@ -65,23 +65,24 @@ env:
ENABLE_LOCAL_CACHE: "true"
REPORT_TOKEN: "report-token-1"
REMOTE_MANAGEMENT_DISABLE: "1"

# https://github.com/git-lfs/git-lfs/issues/5749
GIT_CLONE_PROTECTION_ACTIVE: 'false'
jobs:
linting-coverage:
runs-on: ubuntu-latest
steps:
- name: Checkout
uses: actions/checkout@v2
- uses: actions/setup-node@v2
uses: actions/checkout@v4
- uses: actions/setup-node@v4
with:
node-version: '16'
cache: yarn
- name: install dependencies
run: yarn install --frozen-lockfile --network-concurrency 1
- uses: actions/setup-python@v4
- uses: actions/setup-python@v5
with:
python-version: '3.9'
- uses: actions/cache@v2
- uses: actions/cache@v4
with:
path: ~/.cache/pip
key: ${{ runner.os }}-pip
@@ -114,7 +115,7 @@ jobs:
find . -name "*junit*.xml" -exec cp {} artifacts/junit/ ";"
if: always()
- name: Upload files to artifacts
uses: scality/action-artifacts@v2
uses: scality/action-artifacts@v4
with:
method: upload
url: https://artifacts.scality.net
@@ -127,64 +128,78 @@ jobs:
runs-on: ubuntu-20.04
steps:
- name: Checkout
uses: actions/checkout@v2
uses: actions/checkout@v4
- name: Set up Docker Buildx
uses: docker/setup-buildx-action@v1.6.0
uses: docker/setup-buildx-action@v3
- name: Login to GitHub Registry
uses: docker/login-action@v1.10.0
uses: docker/login-action@v3
with:
registry: ghcr.io
username: ${{ github.repository_owner }}
password: ${{ secrets.GITHUB_TOKEN }}
- name: Login to Registry
uses: docker/login-action@v1
with:
registry: registry.scality.com
username: ${{ secrets.REGISTRY_LOGIN }}
password: ${{ secrets.REGISTRY_PASSWORD }}
password: ${{ github.token }}
- name: Build and push cloudserver image
uses: docker/build-push-action@v3
uses: docker/build-push-action@v5
with:
push: true
context: .
provenance: false
tags: |
ghcr.io/${{ github.repository }}/cloudserver:${{ github.sha }}
registry.scality.com/cloudserver-dev/cloudserver:${{ github.sha }}
ghcr.io/${{ github.repository }}:${{ github.sha }}
cache-from: type=gha,scope=cloudserver
cache-to: type=gha,mode=max,scope=cloudserver
- name: Build and push pykmip image
uses: docker/build-push-action@v5
with:
push: true
context: .github/pykmip
tags: |
ghcr.io/${{ github.repository }}/pykmip:${{ github.sha }}
cache-from: type=gha,scope=pykmip
cache-to: type=gha,mode=max,scope=pykmip

build-federation-image:
uses: scality/workflows/.github/workflows/docker-build.yaml@v1
secrets: inherit
with:
push: true
registry: registry.scality.com
namespace: cloudserver-dev
name: cloudserver
context: .
file: images/svc-base/Dockerfile
tag: ${{ github.sha }}-svc-base
runs-on: ubuntu-20.04
steps:
- name: Checkout
uses: actions/checkout@v4
- name: Set up Docker Buildx
uses: docker/setup-buildx-action@v3
- name: Login to GitHub Registry
uses: docker/login-action@v3
with:
registry: ghcr.io
username: ${{ github.repository_owner }}
password: ${{ github.token }}
- name: Build and push image for federation
uses: docker/build-push-action@v5
with:
push: true
context: .
file: images/svc-base/Dockerfile
tags: |
ghcr.io/${{ github.repository }}:${{ github.sha }}-svc-base
cache-from: type=gha,scope=federation
cache-to: type=gha,mode=max,scope=federation

multiple-backend:
runs-on: ubuntu-latest
needs: build
env:
CLOUDSERVER_IMAGE: ghcr.io/${{ github.repository }}/cloudserver:${{ github.sha }}
CLOUDSERVER_IMAGE: ghcr.io/${{ github.repository }}:${{ github.sha }}
S3BACKEND: mem
S3_LOCATION_FILE: /usr/src/app/tests/locationConfig/locationConfigTests.json
S3DATA: multiple
JOB_NAME: ${{ github.job }}
steps:
- name: Checkout
uses: actions/checkout@v3
- uses: actions/setup-python@v4
uses: actions/checkout@v4
- uses: actions/setup-python@v5
with:
python-version: 3.9
- name: Setup CI environment
uses: ./.github/actions/setup-ci
- name: Setup CI services
run: docker-compose up -d
run: docker compose up -d
working-directory: .github/docker
- name: Run multiple backend test
run: |-
@@ -194,7 +209,7 @@ jobs:
env:
S3_LOCATION_FILE: tests/locationConfig/locationConfigTests.json
- name: Upload logs to artifacts
uses: scality/action-artifacts@v3
uses: scality/action-artifacts@v4
with:
method: upload
url: https://artifacts.scality.net
@@ -209,13 +224,13 @@ jobs:
env:
S3BACKEND: file
S3VAULT: mem
CLOUDSERVER_IMAGE: ghcr.io/${{ github.repository }}/cloudserver:${{ github.sha }}
CLOUDSERVER_IMAGE: ghcr.io/${{ github.repository }}:${{ github.sha }}
MPU_TESTING: "yes"
JOB_NAME: ${{ github.job }}
steps:
- name: Checkout
uses: actions/checkout@v3
- uses: actions/setup-python@v4
uses: actions/checkout@v4
- uses: actions/setup-python@v5
with:
python-version: 3.9
- name: Setup CI environment
@@ -225,7 +240,7 @@ jobs:
sudo apt-get install -y libdigest-hmac-perl
pip install 's3cmd==2.3.0'
- name: Setup CI services
run: docker-compose up -d
run: docker compose up -d
working-directory: .github/docker
- name: Run file ft tests
run: |-
@@ -233,7 +248,7 @@ jobs:
bash wait_for_local_port.bash 8000 40
yarn run ft_test | tee /tmp/artifacts/${{ github.job }}/tests.log
- name: Upload logs to artifacts
uses: scality/action-artifacts@v3
uses: scality/action-artifacts@v4
with:
method: upload
url: https://artifacts.scality.net
@@ -247,20 +262,20 @@ jobs:
needs: build
env:
ENABLE_UTAPI_V2: t
S3BACKEND: mem
S3BACKEND: mem
BUCKET_DENY_FILTER: utapi-event-filter-deny-bucket
CLOUDSERVER_IMAGE: ghcr.io/${{ github.repository }}/cloudserver:${{ github.sha }}
CLOUDSERVER_IMAGE: ghcr.io/${{ github.repository }}:${{ github.sha }}
JOB_NAME: ${{ github.job }}
steps:
- name: Checkout
uses: actions/checkout@v3
- uses: actions/setup-python@v4
uses: actions/checkout@v4
- uses: actions/setup-python@v5
with:
python-version: 3.9
- name: Setup CI environment
uses: ./.github/actions/setup-ci
- name: Setup CI services
run: docker-compose up -d
run: docker compose up -d
working-directory: .github/docker
- name: Run file utapi v2 tests
run: |-
@@ -268,7 +283,7 @@ jobs:
bash wait_for_local_port.bash 8000 40
yarn run test_utapi_v2 | tee /tmp/artifacts/${{ github.job }}/tests.log
- name: Upload logs to artifacts
uses: scality/action-artifacts@v3
uses: scality/action-artifacts@v4
with:
method: upload
url: https://artifacts.scality.net
@@ -284,12 +299,13 @@ jobs:
S3BACKEND: file
S3VAULT: mem
MPU_TESTING: true
CLOUDSERVER_IMAGE: ghcr.io/${{ github.repository }}/cloudserver:${{ github.sha }}
CLOUDSERVER_IMAGE: ghcr.io/${{ github.repository }}:${{ github.sha }}
PYKMIP_IMAGE: ghcr.io/${{ github.repository }}/pykmip:${{ github.sha }}
JOB_NAME: ${{ github.job }}
steps:
- name: Checkout
uses: actions/checkout@v3
- uses: actions/setup-python@v4
uses: actions/checkout@v4
- uses: actions/setup-python@v5
with:
python-version: 3.9
- name: Setup CI environment
@@ -298,7 +314,7 @@ jobs:
run: cp -r ./certs /tmp/ssl-kmip
working-directory: .github/pykmip
- name: Setup CI services
run: docker-compose --profile pykmip up -d
run: docker compose --profile pykmip up -d
working-directory: .github/docker
- name: Run file KMIP tests
run: |-
@@ -307,7 +323,7 @@ jobs:
bash wait_for_local_port.bash 5696 40
yarn run ft_kmip | tee /tmp/artifacts/${{ github.job }}/tests.log
- name: Upload logs to artifacts
uses: scality/action-artifacts@v3
uses: scality/action-artifacts@v4
with:
method: upload
url: https://artifacts.scality.net
constants.js
@@ -179,6 +179,16 @@ const constants = {
assumedRoleArnResourceType: 'assumed-role',
// Session name of the backbeat lifecycle assumed role session.
backbeatLifecycleSessionName: 'backbeat-lifecycle',
unsupportedSignatureChecksums: new Set([
'STREAMING-UNSIGNED-PAYLOAD-TRAILER',
'STREAMING-AWS4-HMAC-SHA256-PAYLOAD-TRAILER',
'STREAMING-AWS4-ECDSA-P256-SHA256-PAYLOAD',
'STREAMING-AWS4-ECDSA-P256-SHA256-PAYLOAD-TRAILER',
]),
supportedSignatureChecksums: new Set([
'UNSIGNED-PAYLOAD',
'STREAMING-AWS4-HMAC-SHA256-PAYLOAD',
]),
};

module.exports = constants;
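For context, a tiny illustrative snippet (not part of the change) showing how the two new sets split `x-amz-content-sha256` values; the relative require path assumes it is run from the repository root:

// Illustrative only: how the new constants classify signature checksum values.
const { supportedSignatureChecksums, unsupportedSignatureChecksums } = require('./constants');

// Accepted values: unsigned payloads and plain SigV4 streaming uploads.
console.log(supportedSignatureChecksums.has('UNSIGNED-PAYLOAD')); // true
console.log(supportedSignatureChecksums.has('STREAMING-AWS4-HMAC-SHA256-PAYLOAD')); // true

// Rejected values: trailing-checksum and ECDSA streaming variants.
console.log(unsupportedSignatureChecksums.has('STREAMING-AWS4-HMAC-SHA256-PAYLOAD-TRAILER')); // true
console.log(unsupportedSignatureChecksums.has('STREAMING-UNSIGNED-PAYLOAD-TRAILER')); // true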
@@ -1,4 +1,4 @@
FROM registry.scality.com/federation/nodesvc-base:7.10.6.0
FROM ghcr.io/scality/federation/nodesvc-base:7.10.6.0

ENV S3_CONFIG_FILE=${CONF_DIR}/config.json
ENV S3_LOCATION_FILE=${CONF_DIR}/locationConfig.json
@@ -37,8 +37,10 @@ const AMZ_ABORT_ID_HEADER = 'x-amz-abort-rule-id';

function _generateExpHeadersObjects(rules, params, datetime) {
const tags = {
TagSet: Object.keys(params.tags)
.map(key => ({ Key: key, Value: params.tags[key] })),
TagSet: params.tags
? Object.keys(params.tags)
.map(key => ({ Key: key, Value: params.tags[key] }))
: [],
};

const objectInfo = { Key: params.key };
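The guard above matters because legacy objects may have no `tags` property at all, and `Object.keys(undefined)` throws a TypeError. A standalone sketch of the new expression (the wrapper name is ours, for illustration only):

// Sketch of the TagSet guard introduced above (buildTagSet is a hypothetical wrapper).
function buildTagSet(params) {
    return {
        TagSet: params.tags
            ? Object.keys(params.tags).map(key => ({ Key: key, Value: params.tags[key] }))
            : [],
    };
}

console.log(buildTagSet({ key: 'legacy-object' }));                // { TagSet: [] }
console.log(buildTagSet({ key: 'object', tags: { env: 'dev' } })); // { TagSet: [{ Key: 'env', Value: 'dev' }] }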
@@ -0,0 +1,32 @@
const { errors } = require('arsenal');

const { unsupportedSignatureChecksums, supportedSignatureChecksums } = require('../../../../constants');

function validateChecksumHeaders(headers) {
// If the x-amz-trailer header is present the request is using one of the
// trailing checksum algorithms, which are not supported.
if (headers['x-amz-trailer'] !== undefined) {
return errors.BadRequest.customizeDescription('trailing checksum is not supported');
}

const signatureChecksum = headers['x-amz-content-sha256'];
if (signatureChecksum === undefined) {
return null;
}

if (supportedSignatureChecksums.has(signatureChecksum)) {
return null;
}

// If the value is not one of the possible checksum algorithms
// the only other valid value is the actual sha256 checksum of the payload.
// Do a simple sanity check of the length to guard against future algos.
// If the value is an unknown algo, then it will fail checksum validation.
if (!unsupportedSignatureChecksums.has(signatureChecksum) && signatureChecksum.length === 64) {
return null;
}

return errors.BadRequest.customizeDescription('unsupported checksum algorithm');
}

module.exports = validateChecksumHeaders;
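A brief sketch of how the new helper responds to a few header combinations (header values are examples; the require path matches the unit test added later in this diff):

// Sketch: exercising validateChecksumHeaders with example header sets.
const validateChecksumHeaders = require('./lib/api/apiUtils/object/validateChecksumHeaders');

// Accepted: no checksum headers, or a plain 64-character sha256 hex digest -> returns null.
console.log(validateChecksumHeaders({}));
console.log(validateChecksumHeaders({ 'x-amz-content-sha256': 'a'.repeat(64) }));

// Rejected: trailing checksums and unsupported streaming variants -> returns a BadRequest error.
console.log(validateChecksumHeaders({ 'x-amz-trailer': 'x-amz-checksum-sha256' }));
console.log(validateChecksumHeaders({ 'x-amz-content-sha256': 'STREAMING-AWS4-HMAC-SHA256-PAYLOAD-TRAILER' }));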
@@ -14,6 +14,7 @@ const { hasNonPrintables } = require('../utilities/stringChecks');
const kms = require('../kms/wrapper');
const { config } = require('../Config');
const { setExpirationHeaders } = require('./apiUtils/object/expirationHeaders');
const validateChecksumHeaders = require('./apiUtils/object/validateChecksumHeaders');

const writeContinue = require('../utilities/writeContinue');
const versionIdUtils = versioning.VersionID;
@@ -67,6 +68,11 @@ function objectPut(authInfo, request, streamingV4Params, log, callback) {
));
}

const checksumHeaderErr = validateChecksumHeaders(headers);
if (checksumHeaderErr) {
return callback(checksumHeaderErr);
}

log.trace('owner canonicalID to send to data', { canonicalID });

return metadataValidateBucketAndObj(valParams, log,
@@ -18,6 +18,8 @@ const locationConstraintCheck
= require('./apiUtils/object/locationConstraintCheck');
const writeContinue = require('../utilities/writeContinue');
const { getObjectSSEConfiguration } = require('./apiUtils/bucket/bucketEncryption');
const validateChecksumHeaders = require('./apiUtils/object/validateChecksumHeaders');

const skipError = new Error('skip');

// We pad the partNumbers so that the parts will be sorted in numerical order.
@@ -61,6 +63,11 @@ function objectPutPart(authInfo, request, streamingV4Params, log,
return cb(errors.EntityTooLarge);
}

const checksumHeaderErr = validateChecksumHeaders(request.headers);
if (checksumHeaderErr) {
return cb(checksumHeaderErr);
}

// Note: Part sizes cannot be less than 5MB in size except for the last.
// However, we do not check this value here because we cannot know which
// part will be the last until a complete MPU request is made. Thus, we let
@@ -1,6 +1,6 @@
{
"name": "s3",
"version": "7.10.30",
"version": "7.10.30-3",
"description": "S3 connector",
"main": "index.js",
"engines": {
@@ -20,7 +20,7 @@
"homepage": "https://github.com/scality/S3#readme",
"dependencies": {
"@hapi/joi": "^17.1.0",
"arsenal": "git+https://github.com/scality/arsenal#7.10.47",
"arsenal": "git+https://github.com/scality/arsenal#7.10.47-1",
"async": "~2.5.0",
"aws-sdk": "2.905.0",
"azure-storage": "^2.1.0",
@@ -30,6 +30,33 @@ function getPolicyParams(paramToChange) {
};
}

function getPolicyParamsWithId(paramToChange, policyId) {
const newParam = {};
const bucketPolicy = {
Version: '2012-10-17',
Id: policyId,
Statement: [basicStatement],
};
if (paramToChange) {
newParam[paramToChange.key] = paramToChange.value;
bucketPolicy.Statement[0] = Object.assign({}, basicStatement, newParam);
}
return {
Bucket: bucket,
Policy: JSON.stringify(bucketPolicy),
};
}

function generateRandomString(length) {
// All allowed characters matching the regex in arsenal
const allowedCharacters = 'abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789+=,.@ -/';
const allowedCharactersLength = allowedCharacters.length;

return [...Array(length)]
.map(() => allowedCharacters[~~(Math.random() * allowedCharactersLength)])
.join('');
}

// Check for the expected error response code and status code.
function assertError(err, expectedErr, cb) {
if (expectedErr === null) {
@@ -102,5 +129,31 @@ describe('aws-sdk test put bucket policy', () => {
s3.putBucketPolicy(params, err =>
assertError(err, 'MalformedPolicy', done));
});

it('should return MalformedPolicy because Id is not a string',
done => {
const params = getPolicyParamsWithId(null, 59);
s3.putBucketPolicy(params, err =>
assertError(err, 'MalformedPolicy', done));
});

it('should put a bucket policy on bucket since Id is a string',
done => {
const params = getPolicyParamsWithId(null, 'cd3ad3d9-2776-4ef1-a904-4c229d1642e');
s3.putBucketPolicy(params, err =>
assertError(err, null, done));
});

it('should allow bucket policy with pincipal arn less than 2048 characters', done => {
const params = getPolicyParams({ key: 'Principal', value: { AWS: `arn:aws:iam::767707094035:user/${generateRandomString(150)}` } }); // eslint-disable-line max-len
s3.putBucketPolicy(params, err =>
assertError(err, null, done));
});

it('should not allow bucket policy with pincipal arn more than 2048 characters', done => {
const params = getPolicyParams({ key: 'Principal', value: { AWS: `arn:aws:iam::767707094035:user/${generateRandomString(2020)}` } }); // eslint-disable-line max-len
s3.putBucketPolicy(params, err =>
assertError(err, 'MalformedPolicy', done));
});
});
});
@@ -0,0 +1,70 @@
const assert = require('assert');
const { makeS3Request } = require('../utils/makeRequest');
const HttpRequestAuthV4 = require('../utils/HttpRequestAuthV4');

const bucket = 'testunsupportedchecksumsbucket';
const objectKey = 'key';
const objData = Buffer.alloc(1024, 'a');

const authCredentials = {
accessKey: 'accessKey1',
secretKey: 'verySecretKey1',
};

const itSkipIfAWS = process.env.AWS_ON_AIR ? it.skip : it;

describe('unsupported checksum requests:', () => {
before(done => {
makeS3Request({
method: 'PUT',
authCredentials,
bucket,
}, err => {
assert.ifError(err);
done();
});
});

after(done => {
makeS3Request({
method: 'DELETE',
authCredentials,
bucket,
}, err => {
assert.ifError(err);
done();
});
});

itSkipIfAWS('should respond with BadRequest for trailing checksum', done => {
const req = new HttpRequestAuthV4(
`http://localhost:8000/${bucket}/${objectKey}`,
Object.assign(
{
method: 'PUT',
headers: {
'content-length': objData.length,
'x-amz-content-sha256': 'STREAMING-AWS4-HMAC-SHA256-PAYLOAD-TRAILER',
'x-amz-trailer': 'x-amz-checksum-sha256',
},
},
authCredentials
),
res => {
assert.strictEqual(res.statusCode, 400);
res.on('data', () => {});
res.on('end', done);
}
);

req.on('error', err => {
assert.ifError(err);
});

req.write(objData);

req.once('drain', () => {
req.end();
});
});
});
@@ -103,6 +103,16 @@ describe('generateExpirationHeaders', () => {
},
{},
],
[
'should provide correct headers for compatibility with legacy objects missing the tags property',
{
lifecycleConfig: lifecycleExpirationDays,
objectParams: { key: 'object', date: objectDate },
},
{
'x-amz-expiration': `expiry-date="${expectedDaysExpiryDate}", rule-id="test-days"`,
},
],
[
'should return correct headers for object (days)',
{
@@ -0,0 +1,75 @@
const assert = require('assert');

const validateChecksumHeaders = require('../../../../lib/api/apiUtils/object/validateChecksumHeaders');
const { unsupportedSignatureChecksums, supportedSignatureChecksums } = require('../../../../constants');

const passingCases = [
{
description: 'should return null if no checksum headers are present',
headers: {},
},
{
description: 'should return null if UNSIGNED-PAYLOAD is used',
headers: {
'x-amz-content-sha256': 'UNSIGNED-PAYLOAD',
},
},
{
description: 'should return null if a sha256 checksum is used',
headers: {
'x-amz-content-sha256': 'thisIs64CharactersLongAndThatsAllWeCheckFor1234567890abcdefghijk',
},
},
];

supportedSignatureChecksums.forEach(checksum => {
passingCases.push({
description: `should return null if ${checksum} is used`,
headers: {
'x-amz-content-sha256': checksum,
},
});
});

const failingCases = [
{
description: 'should return BadRequest if a trailing checksum is used',
headers: {
'x-amz-trailer': 'test',
},
},
{
description: 'should return BadRequest if an unknown algo is used',
headers: {
'x-amz-content-sha256': 'UNSUPPORTED-CHECKSUM',
},
},
];

unsupportedSignatureChecksums.forEach(checksum => {
failingCases.push({
description: `should return BadRequest if ${checksum} is used`,
headers: {
'x-amz-content-sha256': checksum,
},
});
});

describe('validateChecksumHeaders', () => {
passingCases.forEach(testCase => {
it(testCase.description, () => {
const result = validateChecksumHeaders(testCase.headers);
assert.ifError(result);
});
});

failingCases.forEach(testCase => {
it(testCase.description, () => {
const result = validateChecksumHeaders(testCase.headers);
assert(result instanceof Error, 'Expected an error to be returned');
assert.strictEqual(result.is.BadRequest, true);
assert.strictEqual(result.code, 400);
});
});
});
@@ -488,9 +488,9 @@ arraybuffer.slice@~0.0.7:
optionalDependencies:
ioctl "^2.0.2"

"arsenal@git+https://github.com/scality/arsenal#7.10.47":
version "7.10.47"
resolved "git+https://github.com/scality/arsenal#3f24336b83581d121f52146b8003e0a68d9ce876"
"arsenal@git+https://github.com/scality/arsenal#7.10.47-1":
version "7.10.47-1"
resolved "git+https://github.com/scality/arsenal#7f4192a727a3f01fbffa8b7edc062f933f9ef1b1"
dependencies:
"@types/async" "^3.2.12"
"@types/utf8" "^3.0.1"