Compare commits

...

7 Commits

Author SHA1 Message Date
Alexander Chan a804fe1863 CLDSRV-387: dev/7.4 gh actions 2023-06-01 14:18:55 -07:00
bert-e 01ae5e0ab4 Merge branch 'bugfix/S3C-5390-s3api_head-object_with_part-number_1_on_empty_file_fails-hotfix-7.4.10' into q/7.4.10.2 2022-04-12 19:05:07 +00:00
Artem Bakalov 2350bf3518 remove .only
(cherry picked from commit b4725aa032)

lint
2022-04-12 11:47:03 -07:00
Artem Bakalov 74c44e1726 S3C-5390 s3api head-object with part-number 1 on empty file fails: httpCode 416
(cherry picked from commit 4f3195a6ca)
2022-04-12 11:02:34 -07:00
Jonathan Gramain 00b2e6b8a4 [hotfix 7.4.10] CLDSRV-170 update eslint dependency 2022-04-12 10:33:42 -07:00
Jonathan Gramain 57804fa826 bugfix: CLDSRV-170 skip orphan cleanup in UploadPart[Copy]
On part overwrite, do not delete the old (now orphaned) data in
UploadPart/UploadPartCopy if a CompleteMPU of the target MPU is
already in progress.

This is to prevent a race condition where a CompleteMPU is running
while UploadPart is uploading a part for the same MPU.

This leaves an orphan in storage, since only one of the two uploads'
data will be referenced by the finished MPU; but the window is limited
to the CompleteMPU execution, and it should only occur when UploadPart
is retried after a prior stuck request or when a broken client misuses
the MPU API, so it should be acceptable. (A condensed sketch of the
flag protocol follows the commit list.)

Implementation details:

- set a flag in the MPU overview key when starting the CompleteMPU
  process, before listing the parts from metadata to construct the
  manifest

- in UploadPart/UploadPartCopy, after the part metadata is written and
  if the same part already existed, re-fetch the MPU overview key to
  check the flag: if set, skip the deletion of the old data of this
  part, since the CompleteMPU process in progress may choose either
  part data depending on the exact timing of the listing vs. the
  part overwrite.

(cherry picked from commit 8496111518)
2022-04-12 09:38:08 -07:00
Taylor McKinnon 6c612cce2d ft(CLDSRV-102): Add Aborted MPU PUT
(cherry picked from commit 9eba583eb0)
2022-03-09 15:24:30 -08:00
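
A condensed sketch of the CLDSRV-170 flag protocol described in the commit
message above. This is a toy model only: a plain Map stands in for the
metadata backend, and the function names mirror, but are not, the real
helpers this changeset adds to the services module
(metadataMarkMPObjectForCompletion and isCompleteMPUInProgress).

const metadata = new Map(); // stand-in for the shadow MPU bucket metadata

const overviewKey = (objectKey, uploadId, splitter) =>
    `overview${splitter}${objectKey}${splitter}${uploadId}`;

// CompleteMPU side: set the flag on the overview key *before* listing
// the parts from metadata to construct the manifest.
function markMPObjectForCompletion(objectKey, uploadId, splitter, storedMetadata) {
    const md = Object.assign({}, storedMetadata, { completeInProgress: true });
    metadata.set(overviewKey(objectKey, uploadId, splitter), md);
}

// UploadPart side: after writing the new part metadata over an existing
// part, re-fetch the overview key and check the flag.
function isCompleteMPUInProgress(objectKey, uploadId, splitter) {
    const md = metadata.get(overviewKey(objectKey, uploadId, splitter));
    return Boolean(md && md.completeInProgress);
}

// If the flag is set, keep the old part data around (orphan it), since
// the in-progress CompleteMPU may still commit the old location;
// otherwise it is safe to delete.
markMPObjectForCompletion('my-object', 'uploadId1', ':', {});
const oldLocations = ['old-part-location'];
const oldLocationsToDelete =
    isCompleteMPUInProgress('my-object', 'uploadId1', ':') ? null : oldLocations;
console.log(oldLocationsToDelete); // null: skip deletion, leave the orphan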
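
Similarly, the S3C-5390 fix, visible in the objectHead diff further down,
reduces to one special case: a HEAD request with a part number on a
zero-byte object should answer content-length 0 instead of failing with
416 (InvalidRange). A self-contained sketch of just that branch, with the
request and response plumbing stubbed out:

// Hypothetical distillation of the objectHead change: partSize is the
// looked-up size of the requested part (undefined when absent).
function headPartContentLength(objLength, partSize) {
    const isEmptyObject = objLength === 0;
    // An empty object has no parts, so the lookup always fails; only
    // answer 416 when the object actually has content.
    if (!partSize && !isEmptyObject) {
        return { error: 'InvalidRange' }; // httpCode 416
    }
    return { contentLength: isEmptyObject ? 0 : partSize };
}

console.log(headPartContentLength(0, undefined));  // { contentLength: 0 }
console.log(headPartContentLength(10, 10));        // { contentLength: 10 }
console.log(headPartContentLength(10, undefined)); // { error: 'InvalidRange' }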
31 changed files with 1380 additions and 707 deletions

.github/actions/setup-ci/action.yaml vendored Normal file (32 lines changed)

@@ -0,0 +1,32 @@
---
name: "Setup CI environment"
description: "Setup Cloudserver CI environment"
runs:
using: composite
steps:
- name: Setup etc/hosts
shell: bash
run: sudo echo "127.0.0.1 bucketwebsitetester.s3-website-us-east-1.amazonaws.com" | sudo tee -a /etc/hosts
- name: Setup Credentials
shell: bash
run: bash .github/scripts/credentials.bash
- name: Setup job artifacts directory
shell: bash
run: |-
set -exu;
mkdir -p /tmp/artifacts/${{ github.job }}/;
- uses: actions/setup-node@v2
with:
node-version: '10'
cache: 'yarn'
- name: install dependencies
shell: bash
run: yarn install --ignore-engines --frozen-lockfile --network-concurrency 1
- uses: actions/cache@v2
with:
path: ~/.cache/pip
key: ${{ runner.os }}-pip
- name: Install python deps
shell: bash
run: pip install docker-compose

.github/docker/creds.env vendored Normal file (37 lines changed)

@@ -0,0 +1,37 @@
azurebackend_AZURE_STORAGE_ACCESS_KEY
azurebackend_AZURE_STORAGE_ACCOUNT_NAME
azurebackend_AZURE_STORAGE_ENDPOINT
azurebackend2_AZURE_STORAGE_ACCESS_KEY
azurebackend2_AZURE_STORAGE_ACCOUNT_NAME
azurebackend2_AZURE_STORAGE_ENDPOINT
azurebackendmismatch_AZURE_STORAGE_ACCESS_KEY
azurebackendmismatch_AZURE_STORAGE_ACCOUNT_NAME
azurebackendmismatch_AZURE_STORAGE_ENDPOINT
azurenonexistcontainer_AZURE_STORAGE_ACCESS_KEY
azurenonexistcontainer_AZURE_STORAGE_ACCOUNT_NAME
azurenonexistcontainer_AZURE_STORAGE_ENDPOINT
azuretest_AZURE_BLOB_ENDPOINT
b2backend_B2_ACCOUNT_ID
b2backend_B2_STORAGE_ACCESS_KEY
GOOGLE_SERVICE_EMAIL
GOOGLE_SERVICE_KEY
AWS_S3_BACKEND_ACCESS_KEY
AWS_S3_BACKEND_SECRET_KEY
AWS_S3_BACKEND_ACCESS_KEY_2
AWS_S3_BACKEND_SECRET_KEY_2
AWS_GCP_BACKEND_ACCESS_KEY
AWS_GCP_BACKEND_SECRET_KEY
AWS_GCP_BACKEND_ACCESS_KEY_2
AWS_GCP_BACKEND_SECRET_KEY_2
b2backend_B2_STORAGE_ENDPOINT
gcpbackend2_GCP_SERVICE_EMAIL
gcpbackend2_GCP_SERVICE_KEY
gcpbackend2_GCP_SERVICE_KEYFILE
gcpbackend_GCP_SERVICE_EMAIL
gcpbackend_GCP_SERVICE_KEY
gcpbackendmismatch_GCP_SERVICE_EMAIL
gcpbackendmismatch_GCP_SERVICE_KEY
gcpbackend_GCP_SERVICE_KEYFILE
gcpbackendmismatch_GCP_SERVICE_KEYFILE
gcpbackendnoproxy_GCP_SERVICE_KEYFILE
gcpbackendproxy_GCP_SERVICE_KEYFILE

.github/docker/docker-compose.yaml vendored Normal file (49 lines changed)

@@ -0,0 +1,49 @@
services:
cloudserver:
image: ${CLOUDSERVER_IMAGE}
command: sh -c "yarn start > /artifacts/s3.log"
network_mode: "host"
volumes:
- /tmp/ssl:/ssl
- /tmp/ssl-kmip:/ssl-kmip
- ${HOME}/.aws/credentials:/root/.aws/credentials
- /tmp/artifacts/${JOB_NAME}:/artifacts
environment:
- CI=true
- ENABLE_LOCAL_CACHE=true
- REDIS_HOST=0.0.0.0
- REDIS_PORT=6379
- REPORT_TOKEN=report-token-1
- REMOTE_MANAGEMENT_DISABLE=1
- HEALTHCHECKS_ALLOWFROM=0.0.0.0/0
- DATA_HOST=0.0.0.0
- METADATA_HOST=0.0.0.0
- S3BACKEND
- S3DATA
- MPU_TESTING
- S3VAULT
- S3_LOCATION_FILE
env_file:
- creds.env
depends_on:
- redis
extra_hosts:
- "bucketwebsitetester.s3-website-us-east-1.amazonaws.com:127.0.0.1"
- "pykmip.local:127.0.0.1"
redis:
image: redis:alpine
network_mode: "host"
squid:
network_mode: "host"
profiles: ['ci-proxy']
image: scality/ci-squid
command: >-
sh -c 'mkdir -p /ssl &&
openssl req -new -newkey rsa:2048 -sha256 -days 365 -nodes -x509 \
-subj "/C=US/ST=Country/L=City/O=Organization/CN=CN=scality-proxy" \
-keyout /ssl/myca.pem -out /ssl/myca.pem &&
cp /ssl/myca.pem /ssl/CA.pem &&
squid -f /etc/squid/squid.conf -N -z &&
squid -f /etc/squid/squid.conf -NYCd 1'
volumes:
- /tmp/ssl:/ssl


@@ -2,9 +2,9 @@
set -x #echo on
set -e #exit at the first error
mkdir -p ~/.aws
mkdir -p $HOME/.aws
cat >>/root/.aws/credentials <<EOF
cat >>$HOME/.aws/credentials <<EOF
[default]
aws_access_key_id = $AWS_S3_BACKEND_ACCESS_KEY
aws_secret_access_key = $AWS_S3_BACKEND_SECRET_KEY

.github/workflows/tests.yaml vendored Normal file (239 lines changed)

@@ -0,0 +1,239 @@
---
name: tests
on:
workflow_dispatch:
push:
branches-ignore:
- 'development/**'
- 'q/*/**'
env:
# Secrets
azurebackend_AZURE_STORAGE_ACCESS_KEY: >-
${{ secrets.AZURE_STORAGE_ACCESS_KEY }}
azurebackend_AZURE_STORAGE_ACCOUNT_NAME: >-
${{ secrets.AZURE_STORAGE_ACCOUNT_NAME }}
azurebackend_AZURE_STORAGE_ENDPOINT: >-
${{ secrets.AZURE_STORAGE_ENDPOINT }}
azurebackend2_AZURE_STORAGE_ACCESS_KEY: >-
${{ secrets.AZURE_STORAGE_ACCESS_KEY_2 }}
azurebackend2_AZURE_STORAGE_ACCOUNT_NAME: >-
${{ secrets.AZURE_STORAGE_ACCOUNT_NAME_2 }}
azurebackend2_AZURE_STORAGE_ENDPOINT: >-
${{ secrets.AZURE_STORAGE_ENDPOINT_2 }}
azurebackendmismatch_AZURE_STORAGE_ACCESS_KEY: >-
${{ secrets.AZURE_STORAGE_ACCESS_KEY }}
azurebackendmismatch_AZURE_STORAGE_ACCOUNT_NAME: >-
${{ secrets.AZURE_STORAGE_ACCOUNT_NAME }}
azurebackendmismatch_AZURE_STORAGE_ENDPOINT: >-
${{ secrets.AZURE_STORAGE_ENDPOINT }}
azurenonexistcontainer_AZURE_STORAGE_ACCESS_KEY: >-
${{ secrets.AZURE_STORAGE_ACCESS_KEY }}
azurenonexistcontainer_AZURE_STORAGE_ACCOUNT_NAME: >-
${{ secrets.AZURE_STORAGE_ACCOUNT_NAME }}
azurenonexistcontainer_AZURE_STORAGE_ENDPOINT: >-
${{ secrets.AZURE_STORAGE_ENDPOINT }}
azuretest_AZURE_BLOB_ENDPOINT: "${{ secrets.AZURE_STORAGE_ENDPOINT }}"
b2backend_B2_ACCOUNT_ID: "${{ secrets.B2BACKEND_B2_ACCOUNT_ID }}"
b2backend_B2_STORAGE_ACCESS_KEY: >-
${{ secrets.B2BACKEND_B2_STORAGE_ACCESS_KEY }}
GOOGLE_SERVICE_EMAIL: "${{ secrets.GCP_SERVICE_EMAIL }}"
GOOGLE_SERVICE_KEY: "${{ secrets.GCP_SERVICE_KEY }}"
AWS_S3_BACKEND_ACCESS_KEY: "${{ secrets.AWS_S3_BACKEND_ACCESS_KEY }}"
AWS_S3_BACKEND_SECRET_KEY: "${{ secrets.AWS_S3_BACKEND_SECRET_KEY }}"
AWS_S3_BACKEND_ACCESS_KEY_2: "${{ secrets.AWS_S3_BACKEND_ACCESS_KEY_2 }}"
AWS_S3_BACKEND_SECRET_KEY_2: "${{ secrets.AWS_S3_BACKEND_SECRET_KEY_2 }}"
AWS_GCP_BACKEND_ACCESS_KEY: "${{ secrets.AWS_GCP_BACKEND_ACCESS_KEY }}"
AWS_GCP_BACKEND_SECRET_KEY: "${{ secrets.AWS_GCP_BACKEND_SECRET_KEY }}"
AWS_GCP_BACKEND_ACCESS_KEY_2: "${{ secrets.AWS_GCP_BACKEND_ACCESS_KEY_2 }}"
AWS_GCP_BACKEND_SECRET_KEY_2: "${{ secrets.AWS_GCP_BACKEND_SECRET_KEY_2 }}"
b2backend_B2_STORAGE_ENDPOINT: "${{ secrets.B2BACKEND_B2_STORAGE_ENDPOINT }}"
gcpbackend2_GCP_SERVICE_EMAIL: "${{ secrets.GCP2_SERVICE_EMAIL }}"
gcpbackend2_GCP_SERVICE_KEY: "${{ secrets.GCP2_SERVICE_KEY }}"
gcpbackend2_GCP_SERVICE_KEYFILE: /root/.gcp/servicekey
gcpbackend_GCP_SERVICE_EMAIL: "${{ secrets.GCP_SERVICE_EMAIL }}"
gcpbackend_GCP_SERVICE_KEY: "${{ secrets.GCP_SERVICE_KEY }}"
gcpbackendmismatch_GCP_SERVICE_EMAIL: >-
${{ secrets.GCPBACKENDMISMATCH_GCP_SERVICE_EMAIL }}
gcpbackendmismatch_GCP_SERVICE_KEY: >-
${{ secrets.GCPBACKENDMISMATCH_GCP_SERVICE_KEY }}
gcpbackend_GCP_SERVICE_KEYFILE: /root/.gcp/servicekey
gcpbackendmismatch_GCP_SERVICE_KEYFILE: /root/.gcp/servicekey
gcpbackendnoproxy_GCP_SERVICE_KEYFILE: /root/.gcp/servicekey
gcpbackendproxy_GCP_SERVICE_KEYFILE: /root/.gcp/servicekey
# Configs
ENABLE_LOCAL_CACHE: "true"
REPORT_TOKEN: "report-token-1"
REMOTE_MANAGEMENT_DISABLE: "1"
jobs:
linting-coverage:
runs-on: ubuntu-latest
steps:
- name: Checkout
uses: actions/checkout@v2
- uses: actions/setup-node@v2
with:
node-version: '10'
cache: yarn
- name: install dependencies
run: yarn install --frozen-lockfile --network-concurrency 1
- uses: actions/setup-python@v4
with:
python-version: '3.9'
- uses: actions/cache@v2
with:
path: ~/.cache/pip
key: ${{ runner.os }}-pip
- name: Install python deps
run: pip install flake8
- name: Lint Javascript
run: yarn run --silent lint -- --max-warnings 0
- name: Lint Markdown
run: yarn run --silent lint_md
- name: Lint python
run: flake8 $(git ls-files "*.py")
- name: Lint Yaml
run: yamllint -c yamllint.yml $(git ls-files "*.yml")
- name: Unit Coverage
run: |
set -ex
mkdir -p $CIRCLE_TEST_REPORTS/unit
yarn test
yarn run test_legacy_location
env:
S3_LOCATION_FILE: tests/locationConfig/locationConfigTests.json
CIRCLE_TEST_REPORTS: /tmp
CIRCLE_ARTIFACTS: /tmp
CI_REPORTS: /tmp
- name: Unit Coverage logs
run: find /tmp/unit -exec cat {} \;
- name: preparing junit files for upload
run: |
mkdir -p artifacts/junit
find . -name "*junit*.xml" -exec cp {} artifacts/junit/ ";"
if: always()
- name: Upload files to artifacts
uses: scality/action-artifacts@v2
with:
method: upload
url: https://artifacts.scality.net
user: ${{ secrets.ARTIFACTS_USER }}
password: ${{ secrets.ARTIFACTS_PASSWORD }}
source: artifacts
if: always()
build:
runs-on: ubuntu-20.04
steps:
- name: Checkout
uses: actions/checkout@v2
- name: Set up Docker Buildx
uses: docker/setup-buildx-action@v1.6.0
- name: Login to GitHub Registry
uses: docker/login-action@v1.10.0
with:
registry: ghcr.io
username: ${{ github.repository_owner }}
password: ${{ secrets.GITHUB_TOKEN }}
- name: Login to Registry
uses: docker/login-action@v1
with:
registry: registry.scality.com
username: ${{ secrets.REGISTRY_LOGIN }}
password: ${{ secrets.REGISTRY_PASSWORD }}
- name: Build and push cloudserver image
uses: docker/build-push-action@v3
with:
push: true
context: .
provenance: false
tags: |
ghcr.io/${{ github.repository }}/cloudserver:${{ github.sha }}
registry.scality.com/cloudserver-dev/cloudserver:${{ github.sha }}
cache-from: type=gha,scope=cloudserver
cache-to: type=gha,mode=max,scope=cloudserver
multiple-backend:
runs-on: ubuntu-latest
needs: build
env:
CLOUDSERVER_IMAGE: ghcr.io/${{ github.repository }}/cloudserver:${{ github.sha }}
S3BACKEND: mem
S3_LOCATION_FILE: /usr/src/app/tests/locationConfig/locationConfigTests.json
S3DATA: multiple
JOB_NAME: ${{ github.job }}
steps:
- name: Checkout
uses: actions/checkout@v3
- uses: actions/setup-python@v4
with:
python-version: 3.9
- name: Setup CI environment
uses: ./.github/actions/setup-ci
- name: Setup CI services
run: docker-compose up -d
working-directory: .github/docker
- name: Run multiple backend test
run: |-
set -o pipefail;
bash wait_for_local_port.bash 8000 40
yarn run multiple_backend_test | tee /tmp/artifacts/${{ github.job }}/tests.log
env:
S3_LOCATION_FILE: tests/locationConfig/locationConfigTests.json
- name: Upload logs to artifacts
uses: scality/action-artifacts@v3
with:
method: upload
url: https://artifacts.scality.net
user: ${{ secrets.ARTIFACTS_USER }}
password: ${{ secrets.ARTIFACTS_PASSWORD }}
source: /tmp/artifacts
if: always()
file-ft-tests:
runs-on: ubuntu-latest
needs: build
env:
S3BACKEND: file
S3VAULT: mem
CLOUDSERVER_IMAGE: ghcr.io/${{ github.repository }}/cloudserver:${{ github.sha }}
MPU_TESTING: "yes"
JOB_NAME: ${{ github.job }}
steps:
- name: Checkout
uses: actions/checkout@v3
- uses: actions/setup-python@v4
with:
python-version: |
2.7
3.9
- name: Setup CI environment
uses: ./.github/actions/setup-ci
- name: Install python2 environment
run: |
sudo apt-get install -y libdigest-hmac-perl
pip install virtualenv==20.21.0
virtualenv -p $(which python2) ~/.virtualenv/py2
source ~/.virtualenv/py2/bin/activate
pip install 's3cmd==1.6.1'
- name: Setup CI services
run: docker-compose up -d
working-directory: .github/docker
- name: Run file ft tests
run: |-
set -o pipefail;
bash wait_for_local_port.bash 8000 40
source ~/.virtualenv/py2/bin/activate
yarn run ft_test | tee /tmp/artifacts/${{ github.job }}/tests.log
- name: Upload logs to artifacts
uses: scality/action-artifacts@v3
with:
method: upload
url: https://artifacts.scality.net
user: ${{ secrets.ARTIFACTS_USER }}
password: ${{ secrets.ARTIFACTS_PASSWORD }}
source: /tmp/artifacts
if: always()


@@ -1,4 +1,4 @@
FROM node:10.22.0-slim
FROM node:10.22.0-buster-slim
MAINTAINER Giorgio Regni <gr@scality.com>
WORKDIR /usr/src/app


@@ -1,13 +0,0 @@
#!/bin/bash
script_full_path=$(readlink -f "$0")
file_dir=$(dirname "$script_full_path")/..
PACKAGE_VERSION=$(cat $file_dir/package.json \
| grep version \
| head -1 \
| awk -F: '{ print $2 }' \
| sed 's/[",]//g' \
| tr -d '[[:space:]]')
echo $PACKAGE_VERSION


@@ -1,293 +0,0 @@
---
version: 0.2
branches:
feature/*, improvement/*, bugfix/*, w/*, q/*, hotfix/*, dependabot/*:
stage: pre-merge
development/*:
stage: post-merge
models:
- env: &global-env
azurebackend_AZURE_STORAGE_ACCESS_KEY: >-
%(secret:azure_storage_access_key)s
azurebackend_AZURE_STORAGE_ACCOUNT_NAME: >-
%(secret:azure_storage_account_name)s
azurebackend_AZURE_STORAGE_ENDPOINT: >-
%(secret:azure_storage_endpoint)s
azurebackend2_AZURE_STORAGE_ACCESS_KEY: >-
%(secret:azure_storage_access_key_2)s
azurebackend2_AZURE_STORAGE_ACCOUNT_NAME: >-
%(secret:azure_storage_account_name_2)s
azurebackend2_AZURE_STORAGE_ENDPOINT: >-
%(secret:azure_storage_endpoint_2)s
azurebackendmismatch_AZURE_STORAGE_ACCESS_KEY: >-
%(secret:azure_storage_access_key)s
azurebackendmismatch_AZURE_STORAGE_ACCOUNT_NAME: >-
%(secret:azure_storage_account_name)s
azurebackendmismatch_AZURE_STORAGE_ENDPOINT: >-
%(secret:azure_storage_endpoint)s
azurenonexistcontainer_AZURE_STORAGE_ACCESS_KEY: >-
%(secret:azure_storage_access_key)s
azurenonexistcontainer_AZURE_STORAGE_ACCOUNT_NAME: >-
%(secret:azure_storage_account_name)s
azurenonexistcontainer_AZURE_STORAGE_ENDPOINT: >-
%(secret:azure_storage_endpoint)s
azuretest_AZURE_BLOB_ENDPOINT: "%(secret:azure_storage_endpoint)s"
b2backend_B2_ACCOUNT_ID: "%(secret:b2backend_b2_account_id)s"
b2backend_B2_STORAGE_ACCESS_KEY: >-
%(secret:b2backend_b2_storage_access_key)s
GOOGLE_SERVICE_EMAIL: "%(secret:gcp_service_email)s"
GOOGLE_SERVICE_KEY: "%(secret:gcp_service_key)s"
AWS_S3_BACKEND_ACCESS_KEY: "%(secret:aws_s3_backend_access_key)s"
AWS_S3_BACKEND_SECRET_KEY: "%(secret:aws_s3_backend_secret_key)s"
AWS_S3_BACKEND_ACCESS_KEY_2: "%(secret:aws_s3_backend_access_key_2)s"
AWS_S3_BACKEND_SECRET_KEY_2: "%(secret:aws_s3_backend_secret_key_2)s"
AWS_GCP_BACKEND_ACCESS_KEY: "%(secret:aws_gcp_backend_access_key)s"
AWS_GCP_BACKEND_SECRET_KEY: "%(secret:aws_gcp_backend_secret_key)s"
AWS_GCP_BACKEND_ACCESS_KEY_2: "%(secret:aws_gcp_backend_access_key_2)s"
AWS_GCP_BACKEND_SECRET_KEY_2: "%(secret:aws_gcp_backend_secret_key_2)s"
b2backend_B2_STORAGE_ENDPOINT: "%(secret:b2backend_b2_storage_endpoint)s"
gcpbackend2_GCP_SERVICE_EMAIL: "%(secret:gcp2_service_email)s"
gcpbackend2_GCP_SERVICE_KEY: "%(secret:gcp2_service_key)s"
gcpbackend2_GCP_SERVICE_KEYFILE: /root/.gcp/servicekey
gcpbackend_GCP_SERVICE_EMAIL: "%(secret:gcp_service_email)s"
gcpbackend_GCP_SERVICE_KEY: "%(secret:gcp_service_key)s"
gcpbackendmismatch_GCP_SERVICE_EMAIL: >-
%(secret:gcpbackendmismatch_gcp_service_email)s
gcpbackendmismatch_GCP_SERVICE_KEY: >-
%(secret:gcpbackendmismatch_gcp_service_key)s
gcpbackend_GCP_SERVICE_KEYFILE: /root/.gcp/servicekey
gcpbackendmismatch_GCP_SERVICE_KEYFILE: /root/.gcp/servicekey
gcpbackendnoproxy_GCP_SERVICE_KEYFILE: /root/.gcp/servicekey
gcpbackendproxy_GCP_SERVICE_KEYFILE: /root/.gcp/servicekey
- env: &multiple-backend-vars
S3BACKEND: "mem"
S3DATA: "multiple"
- env: &file-mem-mpu
S3BACKEND: "file"
S3VAULT: "mem"
MPU_TESTING: "yes"
- Git: &clone
name: Pull repo
repourl: '%(prop:git_reference)s'
shallow: true
retryFetch: true
haltOnFailure: true
- ShellCommand: &credentials
name: Setup Credentials
command: bash eve/workers/build/credentials.bash
haltOnFailure: true
env: *global-env
- ShellCommand: &node_version
name: get node version
command: node -v
- ShellCommand: &yarn-install
name: install modules
command: yarn install --ignore-engines --frozen-lockfile
haltOnFailure: true
- ShellCommand: &check-s3-action-logs
name: Check s3 action logs
command: |
LOGS=`cat /artifacts/s3.log | grep 'No actionLog'`
test `echo -n ${LOGS} | wc -l` -eq 0 || (echo $LOGS && false)
- Upload: &upload-artifacts
source: /artifacts
urls:
- "*"
- ShellCommand: &follow-s3-log
logfiles:
s3:
filename: /artifacts/s3.log
follow: true
- ShellCommand: &setup-junit-upload
name: preparing junit files for upload
command: |
mkdir -p artifacts/junit
find . -name "*junit*.xml" -exec cp {} artifacts/junit/ ";"
alwaysRun: true
- Upload: &upload-junits
source: artifacts
urls:
- "*"
alwaysRun: true
- ShellCommand: &setup-github-ssh
name: setup ssh with github
command: |
mkdir -p ~/.ssh
ssh-keyscan -H github.com > ~/.ssh/ssh_known_hosts
stages:
pre-merge:
worker:
type: local
steps:
- TriggerStages:
name: Launch all workers
stage_names:
- linting-coverage
- file-ft-tests
- multiple-backend-test
waitForFinish: true
haltOnFailure: true
linting-coverage:
worker:
type: docker
path: eve/workers/build
volumes: &default_volumes
- '/home/eve/workspace'
steps:
- Git: *clone
- ShellCommand: *setup-github-ssh
- ShellCommand: *yarn-install
- ShellCommand: *credentials
- ShellCommand:
name: Linting
command: |
set -ex
yarn run --silent lint -- --max-warnings 0
yarn run --silent lint_md
flake8 $(git ls-files "*.py")
yamllint -c yamllint.yml $(git ls-files "*.yml")
- ShellCommand:
name: Unit Coverage
command: |
set -ex
mkdir -p $CIRCLE_TEST_REPORTS/unit
yarn test
yarn run test_legacy_location
env: &shared-vars
<<: *global-env
S3_LOCATION_FILE: tests/locationConfig/locationConfigTests.json
CIRCLE_TEST_REPORTS: /tmp
CIRCLE_ARTIFACTS: /tmp
CI_REPORTS: /tmp
- ShellCommand:
name: Unit Coverage logs
command: find /tmp/unit -exec cat {} \;
- ShellCommand: *setup-junit-upload
- Upload: *upload-junits
multiple-backend-test:
worker:
type: kube_pod
path: eve/workers/pod.yaml
images:
aggressor: eve/workers/build
s3: "."
vars:
aggressorMem: "2Gi"
s3Mem: "2Gi"
env:
<<: *multiple-backend-vars
<<: *global-env
steps:
- Git: *clone
- ShellCommand: *setup-github-ssh
- ShellCommand: *credentials
- ShellCommand: *yarn-install
- ShellCommand:
command: |
bash -c "
source /root/.aws/exports &> /dev/null
set -ex
bash wait_for_local_port.bash 8000 40
yarn run multiple_backend_test"
<<: *follow-s3-log
env:
<<: *multiple-backend-vars
<<: *global-env
S3_LOCATION_FILE: tests/locationConfig/locationConfigTests.json
- ShellCommand:
command: mvn test
workdir: build/tests/functional/jaws
<<: *follow-s3-log
env:
<<: *multiple-backend-vars
- ShellCommand:
command: rspec tests.rb
workdir: build/tests/functional/fog
<<: *follow-s3-log
env:
<<: *multiple-backend-vars
- ShellCommand: *check-s3-action-logs
- ShellCommand: *setup-junit-upload
- Upload: *upload-artifacts
- Upload: *upload-junits
file-ft-tests:
worker:
type: kube_pod
path: eve/workers/pod.yaml
images:
aggressor: eve/workers/build
s3: "."
vars:
aggressorMem: "1920Mi"
s3Mem: "2Gi"
redis: enabled
env:
<<: *file-mem-mpu
<<: *global-env
steps:
- Git: *clone
- ShellCommand: *setup-github-ssh
- ShellCommand: *credentials
- ShellCommand: *yarn-install
- ShellCommand:
command: |
set -ex
bash wait_for_local_port.bash 8000 40
yarn run ft_test
<<: *follow-s3-log
env:
<<: *file-mem-mpu
<<: *global-env
- ShellCommand: *check-s3-action-logs
- ShellCommand: *setup-junit-upload
- Upload: *upload-artifacts
- Upload: *upload-junits
post-merge:
worker:
type: local
steps:
- Git: *clone
- ShellCommand: &docker_login
name: Private Registry Login
command: >
docker login
-u '%(secret:private_registry_username)s'
-p '%(secret:private_registry_password)s'
'%(secret:private_registry_url)s'
- ShellCommand:
name: Dockerhub Login
command: >
docker login
-u '%(secret:dockerhub_ro_user)s'
-p '%(secret:dockerhub_ro_password)s'
- SetProperty: &docker_image_name
name: Set docker image name property
property: docker_image_name
value:
"%(secret:private_registry_url)s/zenko/cloudserver:\
%(prop:commit_short_revision)s"
- ShellCommand:
name: Build docker image
command: >-
docker build
--no-cache
-t %(prop:docker_image_name)s
.
- ShellCommand:
name: Tag images
command: |
docker tag %(prop:docker_image_name)s zenko/cloudserver:$TAG
env:
TAG: "latest-%(prop:product_version)s"
- ShellCommand:
name: Push image
command: |
docker push %(prop:docker_image_name)s
docker push zenko/cloudserver:latest-%(prop:product_version)s


@@ -1,58 +0,0 @@
FROM buildpack-deps:bionic-curl
#
# Install packages needed by the buildchain
#
ENV LANG C.UTF-8
COPY ./s3_packages.list ./buildbot_worker_packages.list /tmp/
RUN curl -sS http://dl.yarnpkg.com/debian/pubkey.gpg | apt-key add - \
&& echo "deb http://dl.yarnpkg.com/debian/ stable main" | tee /etc/apt/sources.list.d/yarn.list \
&& apt-get update \
&& cat /tmp/*packages.list | xargs apt-get install -y \
&& update-ca-certificates \
&& git clone https://github.com/tj/n.git \
&& make -C ./n \
&& n 10.22.0 \
&& pip install pip==9.0.1 \
&& rm -rf ./n \
&& rm -rf /var/lib/apt/lists/* \
&& rm -f /tmp/packages.list
#
# Add user eve
#
RUN adduser -u 1042 --home /home/eve --disabled-password --gecos "" eve \
&& adduser eve sudo \
&& sed -ri 's/(%sudo.*)ALL$/\1NOPASSWD:ALL/' /etc/sudoers
#
# Install Dependencies
#
# Install RVM and gems
ENV RUBY_VERSION="2.4.1"
COPY ./gems.list /tmp/
RUN cat /tmp/gems.list | xargs gem install
#RUN gpg --keyserver hkp://keys.gnupg.net --recv-keys 409B6B1796C275462A1703113804BB82D39DC0E3 \
# && curl -sSL https://get.rvm.io | bash -s stable --ruby=$RUBY_VERSION \
# && usermod -a -G rvm eve
#RUN /bin/bash -l -c "\
# source /usr/local/rvm/scripts/rvm \
# && cat /tmp/gems.list | xargs gem install \
# && rm /tmp/gems.list"
# Install Pip packages
COPY ./pip_packages.list /tmp/
RUN cat /tmp/pip_packages.list | xargs pip install \
&& rm -f /tmp/pip_packages.list \
&& mkdir /home/eve/.aws \
&& chown eve /home/eve/.aws
#
# Run buildbot-worker on startup
#
ARG BUILDBOT_VERSION
RUN pip install buildbot-worker==$BUILDBOT_VERSION
CMD ["/bin/bash", "-l", "-c", "buildbot-worker create-worker . $BUILDMASTER:$BUILDMASTER_PORT $WORKERNAME $WORKERPASS && buildbot-worker start --nodaemon"]


@@ -1,13 +0,0 @@
ca-certificates
git
gnupg
libffi-dev
libssl-dev
python-pip
python2.7
python2.7-dev
software-properties-common
sudo
tcl
wget
procps


@@ -1,4 +0,0 @@
fog-aws:1.3.0
json
mime-types:3.1
rspec:3.5


@@ -1,3 +0,0 @@
flake8
s3cmd==1.6.1
yamllint


@@ -1,13 +0,0 @@
build-essential
ca-certificates
curl
default-jdk
gnupg2
libdigest-hmac-perl
lsof
maven
netcat
redis-server
ruby-full
yarn
zlib1g-dev


@@ -1,144 +0,0 @@
---
apiVersion: v1
kind: Pod
metadata:
name: "proxy-ci-test-pod"
spec:
restartPolicy: Never
terminationGracePeriodSeconds: 10
hostAliases:
- ip: "127.0.0.1"
hostnames:
- "bucketwebsitetester.s3-website-us-east-1.amazonaws.com"
containers:
- name: aggressor
image: {{ images.aggressor }}
imagePullPolicy: IfNotPresent
resources:
requests:
cpu: "1"
memory: {{ vars.aggressorMem }}
limits:
cpu: "1"
memory: {{ vars.aggressorMem }}
volumeMounts:
- name: creds
readOnly: false
mountPath: /root/.aws
- name: artifacts
readOnly: true
mountPath: /artifacts
command:
- bash
- -lc
- |
buildbot-worker create-worker . $BUILDMASTER:$BUILDMASTER_PORT $WORKERNAME $WORKERPASS
buildbot-worker start --nodaemon
env:
- name: CI
value: "true"
- name: ENABLE_LOCAL_CACHE
value: "true"
- name: REPORT_TOKEN
value: "report-token-1"
- name: REMOTE_MANAGEMENT_DISABLE
value: "1"
{% for key, value in vars.env.items() %}
- name: {{ key }}
value: "{{ value }}"
{% endfor %}
- name: s3
image: {{ images.s3 }}
imagePullPolicy: IfNotPresent
resources:
requests:
cpu: "2"
memory: {{ vars.s3Mem }}
limits:
cpu: "2"
memory: {{ vars.s3Mem }}
volumeMounts:
- name: creds
readOnly: false
mountPath: /root/.aws
- name: certs
readOnly: true
mountPath: /tmp
- name: artifacts
readOnly: false
mountPath: /artifacts
command:
- bash
- -ec
- |
sleep 10 # wait for
/usr/src/app/docker-entrypoint.sh npm start | tee -a /artifacts/s3.log
env:
{% if vars.env.S3DATA is defined and vars.env.S3DATA == "multiple" -%}
- name: S3_LOCATION_FILE
value: "/usr/src/app/tests/locationConfig/locationConfigTests.json"
{%- endif %}
- name: CI
value: "true"
- name: ENABLE_LOCAL_CACHE
value: "true"
- name: REDIS_HOST
value: "localhost"
- name: REDIS_PORT
value: "6379"
- name: REPORT_TOKEN
value: "report-token-1"
- name: REMOTE_MANAGEMENT_DISABLE
value: "1"
- name: HEALTHCHECKS_ALLOWFROM
value: "0.0.0.0/0"
{% for key, value in vars.env.items() %}
- name: {{ key }}
value: "{{ value }}"
{% endfor %}
{% if vars.redis is defined and vars.redis == "enabled" -%}
- name: redis
image: redis:alpine
imagePullPolicy: IfNotPresent
resources:
requests:
cpu: 200m
memory: 128Mi
limits:
cpu: 200m
memory: 128Mi
{%- endif %}
{% if vars.env.CI_PROXY is defined and vars.env.CI_PROXY == "true" -%}
- name: squid
image: scality/ci-squid
imagePullPolicy: IfNotPresent
resources:
requests:
cpu: 250m
memory: 128Mi
limits:
cpu: 250m
memory: 128Mi
volumeMounts:
- name: certs
readOnly: false
mountPath: /ssl
command:
- sh
- -exc
- |
mkdir -p /ssl
openssl req -new -newkey rsa:2048 -sha256 -days 365 -nodes -x509 \
-subj "/C=US/ST=Country/L=City/O=Organization/CN=CN=scality-proxy" \
-keyout /ssl/myca.pem -out /ssl/myca.pem
cp /ssl/myca.pem /ssl/CA.pem
squid -f /etc/squid/squid.conf -N -z
squid -f /etc/squid/squid.conf -NYCd 1
{%- endif %}
volumes:
- name: creds
emptyDir: {}
- name: certs
emptyDir: {}
- name: artifacts
emptyDir: {}


@@ -84,6 +84,15 @@ function abortMultipartUpload(authInfo, bucketName, objectKey, uploadId, log,
}
return next(null, mpuBucket, destBucket, false);
},
function sendAbortPut(mpuBucket, destBucket, skipDataDelete, next) {
services.sendAbortMPUPut(bucketName, objectKey, uploadId, log,
err => {
if (err) {
return next(err, destBucket);
}
return next(null, mpuBucket, destBucket, skipDataDelete);
});
},
function getPartLocations(mpuBucket, destBucket, skipDataDelete,
next) {
services.getMPUparts(mpuBucket.getName(), uploadId, log,


@@ -155,6 +155,22 @@ function completeMultipartUpload(authInfo, request, log, callback) {
}
return next(errors.MalformedXML, destBucket);
},
function markOverviewForCompletion(destBucket, objMD, mpuBucket, jsonList,
storedMetadata, location, mpuOverviewKey, next) {
return services.metadataMarkMPObjectForCompletion({
bucketName: mpuBucket.getName(),
objectKey,
uploadId,
splitter,
storedMetadata,
}, log, err => {
if (err) {
return next(err);
}
return next(null, destBucket, objMD, mpuBucket,
jsonList, storedMetadata, location, mpuOverviewKey);
});
},
function retrieveParts(destBucket, objMD, mpuBucket, jsonList,
storedMetadata, location, mpuOverviewKey, next) {
return services.getMPUparts(mpuBucket.getName(), uploadId, log,


@@ -1,3 +1,4 @@
/* eslint-disable max-len */
const { errors, s3middleware } = require('arsenal');
const validateHeaders = s3middleware.validateConditionalHeaders;
const { parseRange } = require('arsenal/lib/network/http/utils');
@@ -124,10 +125,16 @@ function objectHead(authInfo, request, log, callback) {
return callback(errors.BadRequest, corsHeaders);
}
const partSize = getPartSize(objMD, partNumber);
if (!partSize) {
const isEmptyObject = objLength === 0;
if (!partSize && !isEmptyObject) {
return callback(errors.InvalidRange, corsHeaders);
}
responseHeaders['content-length'] = partSize;
responseHeaders['content-length'] =
isEmptyObject
? 0
: partSize;
const partsCount = getPartCountFromMd5(objMD);
if (partsCount) {
responseHeaders['x-amz-mp-parts-count'] = partsCount;


@@ -227,12 +227,12 @@ function objectPutCopyPart(authInfo, request, sourceBucket,
res.controllingLocationConstraint;
return next(null, dataLocator, destBucketMD,
destObjLocationConstraint, copyObjectSize,
sourceVerId, sourceLocationConstraintName);
sourceVerId, sourceLocationConstraintName, splitter);
});
},
function goGetData(dataLocator, destBucketMD,
destObjLocationConstraint, copyObjectSize, sourceVerId,
sourceLocationConstraintName, next) {
sourceLocationConstraintName, splitter, next) {
data.uploadPartCopy(request, log, destBucketMD,
sourceLocationConstraintName,
destObjLocationConstraint, dataLocator, dataStoreContext,
@@ -241,18 +241,18 @@ function objectPutCopyPart(authInfo, request, sourceBucket,
if (error.message === 'skip') {
return next(skipError, destBucketMD, eTag,
lastModified, sourceVerId,
serverSideEncryption);
serverSideEncryption, lastModified, splitter);
}
return next(error, destBucketMD);
}
return next(null, destBucketMD, locations, eTag,
copyObjectSize, sourceVerId, serverSideEncryption,
lastModified);
lastModified, splitter);
});
},
function getExistingPartInfo(destBucketMD, locations, totalHash,
copyObjectSize, sourceVerId, serverSideEncryption, lastModified,
next) {
splitter, next) {
const partKey =
`${uploadId}${constants.splitter}${paddedPartNumber}`;
metadata.getObjectMD(mpuBucketName, partKey, {}, log,
@@ -276,12 +276,12 @@ function objectPutCopyPart(authInfo, request, sourceBucket,
}
return next(null, destBucketMD, locations, totalHash,
prevObjectSize, copyObjectSize, sourceVerId,
serverSideEncryption, lastModified, oldLocations);
serverSideEncryption, lastModified, oldLocations, splitter);
});
},
function storeNewPartMetadata(destBucketMD, locations, totalHash,
prevObjectSize, copyObjectSize, sourceVerId, serverSideEncryption,
lastModified, oldLocations, next) {
lastModified, oldLocations, splitter, next) {
const metaStoreParams = {
partNumber: paddedPartNumber,
contentMD5: totalHash,
@@ -297,20 +297,58 @@ function objectPutCopyPart(authInfo, request, sourceBucket,
{ error: err, method: 'storeNewPartMetadata' });
return next(err);
}
return next(null, oldLocations, destBucketMD, totalHash,
return next(null, locations, oldLocations, destBucketMD, totalHash,
lastModified, sourceVerId, serverSideEncryption,
prevObjectSize, copyObjectSize);
prevObjectSize, copyObjectSize, splitter);
});
},
function cleanupExistingData(oldLocations, destBucketMD, totalHash,
function checkCanDeleteOldLocations(partLocations, oldLocations, destBucketMD,
totalHash, lastModified, sourceVerId, serverSideEncryption,
prevObjectSize, copyObjectSize, splitter, next) {
if (!oldLocations) {
return next(null, oldLocations, destBucketMD, totalHash,
lastModified, sourceVerId, serverSideEncryption,
prevObjectSize, copyObjectSize);
}
return services.isCompleteMPUInProgress({
bucketName: destBucketName,
objectKey: destObjectKey,
uploadId,
splitter,
}, log, (err, completeInProgress) => {
if (err) {
return next(err, destBucketMD);
}
let oldLocationsToDelete = oldLocations;
// Prevent deletion of old data if a completeMPU
// is already in progress because then there is no
// guarantee that the old location will not be the
// committed one.
if (completeInProgress) {
log.warn('not deleting old locations because CompleteMPU is in progress', {
method: 'objectPutCopyPart::checkCanDeleteOldLocations',
bucketName: destBucketName,
objectKey: destObjectKey,
uploadId,
partLocations,
oldLocations,
});
oldLocationsToDelete = null;
}
return next(null, oldLocationsToDelete, destBucketMD, totalHash,
lastModified, sourceVerId, serverSideEncryption,
prevObjectSize, copyObjectSize);
});
},
function cleanupExistingData(oldLocationsToDelete, destBucketMD, totalHash,
lastModified, sourceVerId, serverSideEncryption,
prevObjectSize, copyObjectSize, next) {
// Clean up the old data now that new metadata (with new
// data locations) has been stored
if (oldLocations) {
if (oldLocationsToDelete) {
const delLog = logger.newRequestLoggerFromSerializedUids(
log.getSerializedUids());
return data.batchDelete(oldLocations, request.method, null,
return data.batchDelete(oldLocationsToDelete, request.method, null,
delLog, err => {
if (err) {
// if error, log the error and move on as it is not


@ -12,6 +12,7 @@ const kms = require('../kms/wrapper');
const metadata = require('../metadata/wrapper');
const { pushMetric } = require('../utapi/utilities');
const logger = require('../utilities/logger');
const services = require('../services');
const { config } = require('../Config');
const multipleBackendGateway = require('../data/multipleBackendGateway');
const locationConstraintCheck
@@ -269,19 +270,19 @@ function objectPutPart(authInfo, request, streamingV4Params, log,
}
return next(null, destinationBucket,
objectLocationConstraint, cipherBundle,
partKey, prevObjectSize, oldLocations, partInfo);
partKey, prevObjectSize, oldLocations, partInfo, splitter);
});
},
// Store in data backend.
(destinationBucket, objectLocationConstraint, cipherBundle,
partKey, prevObjectSize, oldLocations, partInfo, next) => {
partKey, prevObjectSize, oldLocations, partInfo, splitter, next) => {
// NOTE: set oldLocations to null so we do not batchDelete for now
if (partInfo && partInfo.dataStoreType === 'azure') {
// skip to storing metadata
return next(null, destinationBucket, partInfo,
partInfo.dataStoreETag,
cipherBundle, partKey, prevObjectSize, null,
objectLocationConstraint);
objectLocationConstraint, splitter);
}
const objectContext = {
bucketName,
@@ -301,12 +302,13 @@ function objectPutPart(authInfo, request, streamingV4Params, log,
}
return next(null, destinationBucket, dataGetInfo, hexDigest,
cipherBundle, partKey, prevObjectSize, oldLocations,
objectLocationConstraint);
objectLocationConstraint, splitter);
});
},
// Store data locations in metadata and delete any overwritten data.
// Store data locations in metadata and delete any overwritten
// data if completeMPU hasn't been initiated yet.
(destinationBucket, dataGetInfo, hexDigest, cipherBundle, partKey,
prevObjectSize, oldLocations, objectLocationConstraint, next) => {
prevObjectSize, oldLocations, objectLocationConstraint, splitter, next) => {
// Use an array to be consistent with objectPutCopyPart where there
// could be multiple locations.
const partLocations = [dataGetInfo];
@@ -336,19 +338,54 @@ function objectPutPart(authInfo, request, streamingV4Params, log,
});
return next(err, destinationBucket);
}
return next(null, oldLocations, objectLocationConstraint,
destinationBucket, hexDigest, prevObjectSize);
return next(null, partLocations, oldLocations, objectLocationConstraint,
destinationBucket, hexDigest, prevObjectSize, splitter);
});
},
(partLocations, oldLocations, objectLocationConstraint, destinationBucket,
hexDigest, prevObjectSize, splitter, next) => {
if (!oldLocations) {
return next(null, oldLocations, objectLocationConstraint,
destinationBucket, hexDigest, prevObjectSize);
}
return services.isCompleteMPUInProgress({
bucketName,
objectKey,
uploadId,
splitter,
}, log, (err, completeInProgress) => {
if (err) {
return next(err, destinationBucket);
}
let oldLocationsToDelete = oldLocations;
// Prevent deletion of old data if a completeMPU
// is already in progress because then there is no
// guarantee that the old location will not be the
// committed one.
if (completeInProgress) {
log.warn('not deleting old locations because CompleteMPU is in progress', {
method: 'objectPutPart::metadata.getObjectMD',
bucketName,
objectKey,
uploadId,
partLocations,
oldLocations,
});
oldLocationsToDelete = null;
}
return next(null, oldLocationsToDelete, objectLocationConstraint,
destinationBucket, hexDigest, prevObjectSize);
});
},
// Clean up any old data now that new metadata (with new
// data locations) has been stored.
(oldLocations, objectLocationConstraint, destinationBucket, hexDigest,
(oldLocationsToDelete, objectLocationConstraint, destinationBucket, hexDigest,
prevObjectSize, next) => {
if (oldLocations) {
if (oldLocationsToDelete) {
log.trace('overwriting mpu part, deleting data');
const delLog = logger.newRequestLoggerFromSerializedUids(
log.getSerializedUids());
return data.batchDelete(oldLocations, request.method,
return data.batchDelete(oldLocationsToDelete, request.method,
objectLocationConstraint, delLog, err => {
if (err) {
// if error, log the error and move on as it is not


@@ -190,6 +190,10 @@ class BucketFileInterface {
if (err) {
return cb(err);
}
// Ignore the PUT done by AbortMPU
if (params && params.isAbort) {
return cb();
}
db.withRequestLogger(log)
.put(objName, JSON.stringify(objVal), params, (err, data) => {
if (err) {


@@ -79,7 +79,11 @@ const metastore = {
putObject: (bucketName, objName, objVal, params, log, cb) => {
process.nextTick(() => {
metastore.getBucketAttributes(bucketName, log, err => {
// Ignore the PUT done by AbortMPU
if (params && params.isAbort) {
return cb(null);
}
return metastore.getBucketAttributes(bucketName, log, err => {
if (err) {
return cb(err);
}


@@ -421,6 +421,80 @@ const services = {
});
},
/**
* Mark the MPU overview key with a flag when starting the
* CompleteMPU operation, to be checked by "put part" operations
*
* @param {object} params - params object
* @param {string} params.bucketName - name of MPU bucket
* @param {string} params.objectKey - object key
* @param {string} params.uploadId - upload ID
* @param {string} params.splitter - splitter for this overview key
* @param {object} params.storedMetadata - original metadata of the overview key
* @param {Logger} log - Logger object
* @param {function} cb - callback(err)
* @return {undefined}
*/
metadataMarkMPObjectForCompletion(params, log, cb) {
assert.strictEqual(typeof params, 'object');
assert.strictEqual(typeof params.bucketName, 'string');
assert.strictEqual(typeof params.objectKey, 'string');
assert.strictEqual(typeof params.uploadId, 'string');
assert.strictEqual(typeof params.splitter, 'string');
assert.strictEqual(typeof params.storedMetadata, 'object');
const splitter = params.splitter;
const longMPUIdentifier =
`overview${splitter}${params.objectKey}${splitter}${params.uploadId}`;
const multipartObjectMD = Object.assign({}, params.storedMetadata);
multipartObjectMD.completeInProgress = true;
metadata.putObjectMD(params.bucketName, longMPUIdentifier, multipartObjectMD,
{}, log, err => {
if (err) {
log.error('error from metadata', { error: err });
return cb(err);
}
return cb();
});
},
/**
* Returns if a CompleteMPU operation is in progress for this
* object, by looking at the `completeInProgress` flag stored in
* the overview key
*
* @param {object} params - params object
* @param {string} params.bucketName - bucket name where object should be stored
* @param {string} params.objectKey - object key
* @param {string} params.uploadId - upload ID
* @param {string} params.splitter - splitter for this overview key
* @param {object} log - request logger instance
* @param {function} cb - callback(err, {bool} completeInProgress)
* @return {undefined}
*/
isCompleteMPUInProgress(params, log, cb) {
assert.strictEqual(typeof params, 'object');
assert.strictEqual(typeof params.bucketName, 'string');
assert.strictEqual(typeof params.objectKey, 'string');
assert.strictEqual(typeof params.uploadId, 'string');
assert.strictEqual(typeof params.splitter, 'string');
const mpuBucketName = `${constants.mpuBucketPrefix}${params.bucketName}`;
const splitter = params.splitter;
const mpuOverviewKey =
`overview${splitter}${params.objectKey}${splitter}${params.uploadId}`;
return metadata.getObjectMD(mpuBucketName, mpuOverviewKey, {}, log,
(err, res) => {
if (err) {
log.error('error getting the overview object from mpu bucket', {
error: err,
method: 'services.isCompleteMPUInProgress',
params,
});
return cb(err);
}
return cb(null, Boolean(res.completeInProgress));
});
},
/**
* Checks whether bucket exists, multipart upload
@@ -709,6 +783,23 @@ const services = {
metadata.deleteObjectMD(mpuBucketName, key, {}, log, callback);
}, err => cb(err));
},
/**
* Send a PUT to the metadata layer for an aborted MPU, with the
* `isAbort` and `replayId` params set
* @param {string} bucketName - MPU destination bucket name
* @param {string} objectKey - MPU destination key
* @param {string} uploadId - MPU uploadId
* @param {object} log - werelogs logger
* @param {Function} cb - callback called with possible error
* @returns {undefined} -
*/
sendAbortMPUPut(bucketName, objectKey, uploadId, log, cb) {
const storeParams = {
isAbort: true,
replayId: uploadId,
};
metadata.putObjectMD(bucketName, objectKey, {}, storeParams, log, cb);
},
};
module.exports = services;
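
As the BucketFileInterface and metastore diffs above show, backends without
replay-key support simply acknowledge and drop the marker PUT issued by
sendAbortMPUPut. A minimal sketch of that contract, with a stand-in
putObjectMD rather than the real metadata wrapper:

// Stand-in for a metadata backend's putObjectMD; the real interfaces in
// this changeset take (bucketName, objName, objVal, params, log, cb).
function putObjectMD(objName, objVal, params, cb) {
    // Ignore the PUT done by AbortMPU (no replay-key support here)
    if (params && params.isAbort) {
        return cb(null);
    }
    // ...normal metadata write path would go here
    return cb(null, { stored: objName });
}

// AbortMPU sends an empty object value with isAbort set and the
// uploadId as replayId, mirroring services.sendAbortMPUPut above.
putObjectMD('my-object', {}, { isAbort: true, replayId: 'uploadId1' },
    err => console.log('abort marker acknowledged:', err === null)); // true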


@@ -43,7 +43,7 @@
"bluebird": "^3.3.1",
"eslint": "^2.4.0",
"eslint-config-airbnb": "^6.0.0",
"eslint-config-scality": "scality/Guidelines#71a059ad",
"eslint-config-scality": "scality/Guidelines#7.4.10.1",
"ioredis": "4.9.5",
"istanbul": "1.0.0-alpha.2",
"istanbul-api": "1.0.0-alpha.13",
@@ -52,6 +52,7 @@
"mocha-junit-reporter": "^1.23.1",
"mocha-multi-reporters": "^1.1.7",
"node-mocks-http": "1.5.2",
"sinon": "^13.0.1",
"tv4": "^1.2.7"
},
"scripts": {


@@ -210,5 +210,39 @@ describe('Complete MPU', () => {
});
});
});
describe('with re-upload of part during CompleteMPU execution', () => {
let uploadId;
let eTag;
beforeEach(() => _initiateMpuAndPutOnePart()
.then(result => {
uploadId = result.uploadId;
eTag = result.eTag;
})
);
it('should complete the MPU successfully and leave a readable object', done => {
async.parallel([
doneReUpload => s3.uploadPart({
Bucket: bucket,
Key: key,
PartNumber: 1,
UploadId: uploadId,
Body: 'foo',
}, err => {
// in case the CompleteMPU finished earlier,
// we may get a NoSuchKey error, so just
// ignore it
if (err && err.code === 'NoSuchKey') {
return doneReUpload();
}
return doneReUpload(err);
}),
doneComplete => _completeMpuAndCheckVid(
uploadId, eTag, undefined, doneComplete),
], done);
});
});
});
});


@@ -577,6 +577,72 @@ describe('Object Part Copy', () => {
checkNoError(err);
});
});
it('should not corrupt object if overwriting an existing part by copying a part ' +
'while the MPU is being completed', () => {
// AWS response etag for this completed MPU
const finalObjETag = '"db77ebbae9e9f5a244a26b86193ad818-1"';
process.stdout.write('Putting first part in MPU test');
return s3.uploadPartCopy({ Bucket: destBucketName,
Key: destObjName,
CopySource: `${sourceBucketName}/${sourceObjName}`,
PartNumber: 1,
UploadId: uploadId,
}).promise().then(res => {
assert.strictEqual(res.ETag, etag);
assert(res.LastModified);
}).then(() => {
process.stdout.write('Overwriting first part in MPU test and completing MPU ' +
'at the same time');
return Promise.all([
s3.uploadPartCopy({
Bucket: destBucketName,
Key: destObjName,
CopySource: `${sourceBucketName}/${sourceObjName}`,
PartNumber: 1,
UploadId: uploadId,
}).promise().catch(err => {
// in case the CompleteMPU finished
// earlier, we may get a NoSuchKey error,
// so just ignore it and resolve with a
// special value, otherwise re-throw the
// error
if (err && err.code === 'NoSuchKey') {
return Promise.resolve(null);
}
throw err;
}),
s3.completeMultipartUpload({
Bucket: destBucketName,
Key: destObjName,
UploadId: uploadId,
MultipartUpload: {
Parts: [
{ ETag: etag, PartNumber: 1 },
],
},
}).promise(),
]);
}).then(([uploadRes, completeRes]) => {
// if upload succeeded before CompleteMPU finished
if (uploadRes !== null) {
assert.strictEqual(uploadRes.ETag, etag);
assert(uploadRes.LastModified);
}
assert.strictEqual(completeRes.Bucket, destBucketName);
assert.strictEqual(completeRes.Key, destObjName);
assert.strictEqual(completeRes.ETag, finalObjETag);
}).then(() => {
process.stdout.write('Getting object put by MPU with ' +
'overwrite part');
return s3.getObject({
Bucket: destBucketName,
Key: destObjName,
}).promise();
}).then(res => {
assert.strictEqual(res.ETag, finalObjETag);
});
});
});
it('should return an error if no such upload initiated',


@@ -1,20 +1,10 @@
/* eslint-disable max-len */
const assert = require('assert');
const async = require('async');
const withV4 = require('../support/withV4');
const BucketUtility = require('../../lib/utility/bucket-util');
const { maximumAllowedPartCount } = require('../../../../../constants');
const bucket = 'mpu-test-bucket';
const object = 'mpu-test-object';
const bodySize = 1024 * 1024 * 5;
const bodyContent = 'a';
const howManyParts = 3;
const partNumbers = Array.from(Array(howManyParts).keys());
const invalidPartNumbers = [-1, 0, maximumAllowedPartCount + 1];
let ETags = [];
const objectConfigs = require('../support/objectConfigs');
function checkError(err, statusCode, code) {
assert.strictEqual(err.statusCode, statusCode);
@@ -26,128 +16,155 @@ function checkNoError(err) {
`Expected success, got error ${JSON.stringify(err)}`);
}
function generateContent(partNumber) {
return Buffer.alloc(bodySize + partNumber, bodyContent);
function generateContent(size, bodyContent) {
return Buffer.alloc(size, bodyContent);
}
describe('Part size tests with object head', () => {
withV4(sigCfg => {
let bucketUtil;
let s3;
objectConfigs.forEach(config => {
describe(config.signature, () => {
let ETags = [];
function headObject(fields, cb) {
s3.headObject(Object.assign({
Bucket: bucket,
Key: object,
}, fields), cb);
}
const {
bucket,
object,
bodySize,
bodyContent,
partNumbers,
invalidPartNumbers,
} = config;
beforeEach(function beforeF(done) {
bucketUtil = new BucketUtility('default', sigCfg);
s3 = bucketUtil.s3;
withV4(sigCfg => { //eslint-disable-line
let bucketUtil;
let s3;
async.waterfall([
next => s3.createBucket({ Bucket: bucket }, err => next(err)),
next => s3.createMultipartUpload({ Bucket: bucket,
Key: object }, (err, data) => {
checkNoError(err);
this.currentTest.UploadId = data.UploadId;
return next();
}),
next => async.mapSeries(partNumbers, (partNumber, callback) => {
const uploadPartParams = {
Bucket: bucket,
Key: object,
PartNumber: partNumber + 1,
UploadId: this.currentTest.UploadId,
Body: generateContent(partNumber + 1),
};
beforeEach(function beforeF(done) {
bucketUtil = new BucketUtility('default', sigCfg);
s3 = bucketUtil.s3;
return s3.uploadPart(uploadPartParams,
(err, data) => {
if (err) {
return callback(err);
async.waterfall([
next => s3.createBucket(
{ Bucket: bucket }, err => next(err)),
next => s3.createMultipartUpload({ Bucket: bucket,
Key: object }, (err, data) => {
checkNoError(err);
this.currentTest.UploadId = data.UploadId;
return next();
}),
next => async.mapSeries(partNumbers, (partNumber, callback) => {
let allocAmount = bodySize + partNumber + 1;
if (config.signature === 'for empty object') {
allocAmount = 0;
}
return callback(null, data.ETag);
});
}, (err, results) => {
checkNoError(err);
ETags = results;
return next();
}),
next => {
const params = {
Bucket: bucket,
Key: object,
MultipartUpload: {
Parts: partNumbers.map(partNumber => ({
ETag: ETags[partNumber],
const uploadPartParams = {
Bucket: bucket,
Key: object,
PartNumber: partNumber + 1,
})),
UploadId: this.currentTest.UploadId,
Body: generateContent(allocAmount, bodyContent),
};
return s3.uploadPart(uploadPartParams,
(err, data) => {
if (err) {
return callback(err);
}
return callback(null, data.ETag);
});
}, (err, results) => {
checkNoError(err);
ETags = results;
return next();
}),
next => {
const params = {
Bucket: bucket,
Key: object,
MultipartUpload: {
Parts: partNumbers.map(partNumber => ({
ETag: ETags[partNumber],
PartNumber: partNumber + 1,
})),
},
UploadId: this.currentTest.UploadId,
};
return s3.completeMultipartUpload(params, next);
},
UploadId: this.currentTest.UploadId,
};
return s3.completeMultipartUpload(params, next);
},
], err => {
checkNoError(err);
done();
});
});
], err => {
checkNoError(err);
done();
});
});
afterEach(done => {
async.waterfall([
next => s3.deleteObject({ Bucket: bucket, Key: object },
err => next(err)),
next => s3.deleteBucket({ Bucket: bucket }, err => next(err)),
], done);
});
afterEach(done => {
async.waterfall([
next => s3.deleteObject({ Bucket: bucket, Key: object },
err => next(err)),
next => s3.deleteBucket({ Bucket: bucket }, err => next(err)),
], done);
});
it('should return the total size of the object ' +
'when --part-number is not used', done => {
const totalSize = partNumbers.reduce((total, current) =>
total + (bodySize + current + 1), 0);
headObject({}, (err, data) => {
checkNoError(err);
assert.equal(totalSize, data.ContentLength);
done();
});
});
it('should return the total size of the object ' +
'when --part-number is not used', done => {
const totalSize = config.meta.computeTotalSize(partNumbers, bodySize);
partNumbers.forEach(part => {
it(`should return the size of part ${part + 1} ` +
`when --part-number is set to ${part + 1}`, done => {
const partNumber = Number.parseInt(part, 0) + 1;
const partSize = bodySize + partNumber;
headObject({ PartNumber: partNumber }, (err, data) => {
checkNoError(err);
assert.equal(partSize, data.ContentLength);
done();
s3.headObject({ Bucket: bucket, Key: object }, (err, data) => {
checkNoError(err);
assert.equal(totalSize, data.ContentLength);
done();
});
});
partNumbers.forEach(part => {
it(`should return the size of part ${part + 1} ` +
`when --part-number is set to ${part + 1}`, done => {
const partNumber = Number.parseInt(part, 0) + 1;
const partSize = bodySize + partNumber;
s3.headObject({ Bucket: bucket, Key: object, PartNumber: partNumber }, (err, data) => {
checkNoError(err);
if (data.ContentLength === 0) {
done();
}
assert.equal(partSize, data.ContentLength);
done();
});
});
});
invalidPartNumbers.forEach(part => {
it(`should return an error when --part-number is set to ${part}`,
done => {
s3.headObject({ Bucket: bucket, Key: object, PartNumber: part }, (err, data) => {
checkError(err, 400, 'BadRequest');
assert.strictEqual(data, null);
done();
});
});
});
it('when incorrect --part-number is used', done => {
bucketUtil = new BucketUtility('default', sigCfg);
s3 = bucketUtil.s3;
s3.headObject({ Bucket: bucket, Key: object, PartNumber: partNumbers.length + 1 },
(err, data) => {
if (config.meta.objectIsEmpty) {
// returns metadata for the only empty part
checkNoError(err);
assert.strictEqual(data.ContentLength, 0);
done();
} else {
// returns a 416 error
// the error response does not contain the actual
// statusCode instead it has '416'
checkError(err, 416, 416);
assert.strictEqual(data, null);
done();
}
});
});
});
});
invalidPartNumbers.forEach(part => {
it(`should return an error when --part-number is set to ${part}`,
done => {
headObject({ PartNumber: part }, (err, data) => {
checkError(err, 400, 'BadRequest');
assert.strictEqual(data, null);
done();
});
});
});
it('should return an error when incorrect --part-number is used',
done => {
headObject({ PartNumber: partNumbers.length + 1 },
(err, data) => {
// the error response does not contain the actual
// statusCode instead it has '416'
checkError(err, 416, 416);
assert.strictEqual(data, null);
done();
});
});
});
});


@@ -0,0 +1,41 @@
/* eslint-disable max-len */
const { maximumAllowedPartCount } = require('../../../../../constants');
const canonicalObjectConfig = {
bucket: 'mpu-test-bucket-canonical-object',
object: 'mpu-test-object-canonical',
bodySize: 1024 * 1024 * 5,
bodyContent: 'a',
howManyParts: 3,
partNumbers: Array.from(Array(3).keys()), // 3 corresponds to howManyParts
invalidPartNumbers: [-1, 0, maximumAllowedPartCount + 1],
signature: 'for canonical object',
meta: {
computeTotalSize: (partNumbers, bodySize) => partNumbers.reduce((total, current) =>
total + bodySize + current + 1
, 0),
objectIsEmpty: false,
},
};
const emptyObjectConfig = {
bucket: 'mpu-test-bucket-empty-object',
object: 'mpu-test-object-empty',
bodySize: 0,
bodyContent: null,
howManyParts: 1,
partNumbers: Array.from(Array(1).keys()), // 1 corresponds to howManyParts
invalidPartNumbers: [-1, 0, maximumAllowedPartCount + 1],
signature: 'for empty object',
meta: {
computeTotalSize: () => 0,
objectIsEmpty: true,
},
};
const objectConfigs = [
canonicalObjectConfig,
emptyObjectConfig,
];
module.exports = objectConfigs;


@@ -1,11 +1,13 @@
const assert = require('assert');
const async = require('async');
const { parseString } = require('xml2js');
const sinon = require('sinon');
const { errors } = require('arsenal');
const { cleanup, DummyRequestLogger } = require('../helpers');
const { config } = require('../../../lib/Config');
const services = require('../../../lib/services');
const DummyRequest = require('../DummyRequest');
const { bucketPut } = require('../../../lib/api/bucketPut');
const initiateMultipartUpload
@@ -40,6 +42,7 @@ function _createAndAbortMpu(usEastSetting, fakeUploadID, locationConstraint,
callback) {
config.locationConstraints['us-east-1'].legacyAwsBehavior =
usEastSetting;
let uploadId;
const post = '<?xml version="1.0" encoding="UTF-8"?>' +
'<CreateBucketConfiguration ' +
'xmlns="http://s3.amazonaws.com/doc/2006-03-01/">' +
@@ -54,7 +57,7 @@
(json, next) => {
// use uploadId parsed from initiateMpu request to construct
// uploadPart and deleteMpu requests
const uploadId =
uploadId =
json.InitiateMultipartUploadResult.UploadId[0];
const partBody = Buffer.from('I am a part\n', 'utf8');
const partRequest = new DummyRequest({
@@ -89,7 +92,7 @@
}),
(deleteMpuRequest, next) =>
multipartDelete(authInfo, deleteMpuRequest, log, next),
], callback);
], err => callback(err, uploadId));
}
describe('Multipart Delete API', () => {
@@ -136,4 +139,15 @@ describe('Multipart Delete API', () => {
done();
});
});
it('should send a PUT to bucketd with `isAbort` and `replayId`', done => {
const spy = sinon.spy(services, 'sendAbortMPUPut');
_createAndAbortMpu(true, false, eastLocation, (err, uploadId) => {
assert.ifError(err);
assert.strictEqual(spy.calledOnce, true);
assert.strictEqual(
spy.calledOnceWith(bucketName, objectKey, uploadId), true);
done();
});
});
});


@@ -1591,6 +1591,78 @@ describe('Multipart Upload API', () => {
});
});
it('should leave orphaned data when overwriting an object part during completeMPU',
done => {
const fullSizedPart = crypto.randomBytes(5 * 1024 * 1024);
const overWritePart = Buffer.from('Overwrite content', 'utf8');
let uploadId;
async.waterfall([
next => bucketPut(authInfo, bucketPutRequest, log, next),
(corsHeaders, next) => initiateMultipartUpload(authInfo,
initiateRequest, log, next),
(result, corsHeaders, next) => parseString(result, next),
(json, next) => {
uploadId = json.InitiateMultipartUploadResult.UploadId[0];
const requestObj = {
bucketName,
namespace,
objectKey,
headers: { host: `${bucketName}.s3.amazonaws.com` },
url: `/${objectKey}?partNumber=1&uploadId=${uploadId}`,
query: {
partNumber: '1',
uploadId,
},
};
const partRequest = new DummyRequest(requestObj, fullSizedPart);
objectPutPart(authInfo, partRequest, undefined, log, (err, partCalculatedHash) => {
assert.deepStrictEqual(err, null);
next(null, requestObj, partCalculatedHash);
});
},
(requestObj, partCalculatedHash, next) => {
assert.deepStrictEqual(ds[1].value, fullSizedPart);
async.parallel([
done => {
const partRequest = new DummyRequest(requestObj, overWritePart);
objectPutPart(authInfo, partRequest, undefined, log, err => {
assert.deepStrictEqual(err, null);
done();
});
},
done => {
const completeBody = '<CompleteMultipartUpload>' +
'<Part>' +
'<PartNumber>1</PartNumber>' +
`<ETag>"${partCalculatedHash}"</ETag>` +
'</Part>' +
'</CompleteMultipartUpload>';
const completeRequest = {
bucketName,
namespace,
objectKey,
parsedHost: 's3.amazonaws.com',
url: `/${objectKey}?uploadId=${uploadId}`,
headers: { host: `${bucketName}.s3.amazonaws.com` },
query: { uploadId },
post: completeBody,
};
completeMultipartUpload(authInfo, completeRequest, log, done);
},
], err => next(err));
},
],
err => {
assert.deepStrictEqual(err, null);
assert.strictEqual(ds[0], undefined);
assert.deepStrictEqual(ds[1].value, fullSizedPart);
assert.deepStrictEqual(ds[2].value, overWritePart);
done();
});
});
it('should throw an error on put of an object part with an invalid ' +
'uploadId', done => {
const testUploadId = 'invalidUploadID';
@@ -1790,12 +1862,22 @@ describe('complete mpu with versioning', () => {
},
(eTag, testUploadId, next) => {
const origPutObject = metadataBackend.putObject;
let callCount = 0;
metadataBackend.putObject =
(bucketName, objName, objVal, params, log, cb) => {
assert.strictEqual(params.replayId, testUploadId);
metadataBackend.putObject = origPutObject;
metadataBackend.putObject(
bucketName, objName, objVal, params, log, cb);
(putBucketName, objName, objVal, params, log, cb) => {
if (callCount === 0) {
// first putObject sets the completeInProgress flag in the overview key
assert.strictEqual(putBucketName, `${constants.mpuBucketPrefix}${bucketName}`);
assert.strictEqual(
objName, `overview${splitter}${objectKey}${splitter}${testUploadId}`);
assert.strictEqual(objVal.completeInProgress, true);
} else {
assert.strictEqual(params.replayId, testUploadId);
metadataBackend.putObject = origPutObject;
}
origPutObject(
putBucketName, objName, objVal, params, log, cb);
callCount += 1;
};
const parts = [{ partNumber: 1, eTag }];
const completeRequest = _createCompleteMpuRequest(testUploadId,
@ -1852,12 +1934,22 @@ describe('complete mpu with versioning', () => {
},
(eTag, testUploadId, next) => {
const origPutObject = metadataBackend.putObject;
let callCount = 0;
metadataBackend.putObject =
(bucketName, objName, objVal, params, log, cb) => {
assert.strictEqual(params.replayId, testUploadId);
metadataBackend.putObject = origPutObject;
metadataBackend.putObject(
bucketName, objName, objVal, params, log, cb);
(putBucketName, objName, objVal, params, log, cb) => {
if (callCount === 0) {
// first putObject sets the completeInProgress flag in the overview key
assert.strictEqual(putBucketName, `${constants.mpuBucketPrefix}${bucketName}`);
assert.strictEqual(
objName, `overview${splitter}${objectKey}${splitter}${testUploadId}`);
assert.strictEqual(objVal.completeInProgress, true);
} else {
assert.strictEqual(params.replayId, testUploadId);
metadataBackend.putObject = origPutObject;
}
origPutObject(
putBucketName, objName, objVal, params, log, cb);
callCount += 1;
};
const parts = [{ partNumber: 1, eTag }];
const completeRequest = _createCompleteMpuRequest(testUploadId,

396
yarn.lock
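For orientation on the entries that follow: keys like arsenal@scality/Arsenal#9f2e74e come from git-pinned dependencies declared with the GitHub owner/repo#ref shorthand, which yarn resolves to a codeload.github.com tarball (see the resolved URLs below). A hypothetical package.json fragment showing the shorthand; in the real project eslint-config-scality would normally sit under devDependencies:

    {
      "dependencies": {
        "arsenal": "scality/Arsenal#9f2e74e",
        "eslint-config-scality": "scality/Guidelines#7.4.10.1"
      }
    }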

@ -74,6 +74,34 @@
dependencies:
"@hapi/hoek" "^9.0.0"
"@sinonjs/commons@^1.6.0", "@sinonjs/commons@^1.7.0", "@sinonjs/commons@^1.8.3":
version "1.8.3"
resolved "https://registry.yarnpkg.com/@sinonjs/commons/-/commons-1.8.3.tgz#3802ddd21a50a949b6721ddd72da36e67e7f1b2d"
integrity sha512-xkNcLAn/wZaX14RPlwizcKicDk9G3F8m2nU3L7Ukm5zBgTwiT0wsoFAHx9Jq56fJA1z/7uKGtCRu16sOUCLIHQ==
dependencies:
type-detect "4.0.8"
"@sinonjs/fake-timers@>=5", "@sinonjs/fake-timers@^9.0.0":
version "9.1.1"
resolved "https://registry.yarnpkg.com/@sinonjs/fake-timers/-/fake-timers-9.1.1.tgz#7b698e0b9d12d93611f06ee143c30ced848e2840"
integrity sha512-Wp5vwlZ0lOqpSYGKqr53INws9HLkt6JDc/pDZcPf7bchQnrXJMXPns8CXx0hFikMSGSWfvtvvpb2gtMVfkWagA==
dependencies:
"@sinonjs/commons" "^1.7.0"
"@sinonjs/samsam@^6.1.1":
version "6.1.1"
resolved "https://registry.yarnpkg.com/@sinonjs/samsam/-/samsam-6.1.1.tgz#627f7f4cbdb56e6419fa2c1a3e4751ce4f6a00b1"
integrity sha512-cZ7rKJTLiE7u7Wi/v9Hc2fs3Ucc3jrWeMgPHbbTCeVAB2S0wOBbYlkJVeNSL04i7fdhT8wIbDq1zhC/PXTD2SA==
dependencies:
"@sinonjs/commons" "^1.6.0"
lodash.get "^4.4.2"
type-detect "^4.0.8"
"@sinonjs/text-encoding@^0.7.1":
version "0.7.1"
resolved "https://registry.yarnpkg.com/@sinonjs/text-encoding/-/text-encoding-0.7.1.tgz#8da5c6530915653f3a1f38fd5f101d8c3f8079c5"
integrity sha512-+iTbntw2IZPb/anVDbypzfQa+ay64MW0Zo8aJ8gZPWMMK6/OubMVb6lUPMagqjOPnmtauXnFCACVl3O7ogjeqQ==
JSONStream@^1.0.0:
version "1.3.5"
resolved "https://registry.yarnpkg.com/JSONStream/-/JSONStream-1.3.5.tgz#3208c1f08d3a4d99261ab64f92302bc15e111ca0"
@ -136,6 +164,14 @@ abstract-leveldown@~6.2.1:
level-supports "~1.0.0"
xtend "~4.0.0"
accepts@1.3.3:
version "1.3.3"
resolved "https://registry.yarnpkg.com/accepts/-/accepts-1.3.3.tgz#c3ca7434938648c3e0d9c1e328dd68b622c284ca"
integrity sha1-w8p0NJOGSMPg2cHjKN1otiLChMo=
dependencies:
mime-types "~2.1.11"
negotiator "0.6.1"
accepts@~1.3.4:
version "1.3.7"
resolved "https://registry.yarnpkg.com/accepts/-/accepts-1.3.7.tgz#531bc726517a3b2b41f850021c6cc15eaab507cd"
@ -187,6 +223,14 @@ ajv-keywords@^1.0.0:
resolved "https://registry.yarnpkg.com/ajv-keywords/-/ajv-keywords-1.5.1.tgz#314dd0a4b3368fad3dfcdc54ede6171b886daf3c"
integrity sha1-MU3QpLM2j609/NxU7eYXG4htrzw=
ajv@4.10.0:
version "4.10.0"
resolved "https://registry.yarnpkg.com/ajv/-/ajv-4.10.0.tgz#7ae6169180eb199192a8b9a19fd0f47fc9ac8764"
integrity sha1-euYWkYDrGZGSqLmhn9D0f8msh2Q=
dependencies:
co "^4.6.0"
json-stable-stringify "^1.0.1"
ajv@6.12.2:
version "6.12.2"
resolved "https://registry.yarnpkg.com/ajv/-/ajv-6.12.2.tgz#c629c5eced17baf314437918d2da88c99d5958cd"
@ -256,6 +300,11 @@ argparse@^1.0.7, argparse@~1.0.2:
dependencies:
sprintf-js "~1.0.2"
arraybuffer.slice@0.0.6:
version "0.0.6"
resolved "https://registry.yarnpkg.com/arraybuffer.slice/-/arraybuffer.slice-0.0.6.tgz#f33b2159f0532a3f3107a272c0ccfbd1ad2979ca"
integrity sha1-8zshWfBTKj8xB6JywMz70a0peco=
arraybuffer.slice@~0.0.7:
version "0.0.7"
resolved "https://registry.yarnpkg.com/arraybuffer.slice/-/arraybuffer.slice-0.0.7.tgz#3bbc4275dd584cc1b10809b89d4e8b63a69e7675"
@ -375,6 +424,31 @@ arsenal@scality/Arsenal#918a1d7:
optionalDependencies:
ioctl "2.0.0"
arsenal@scality/Arsenal#9f2e74e:
version "7.4.3"
resolved "https://codeload.github.com/scality/Arsenal/tar.gz/9f2e74ec6972527c2a9ca6ecb4155618f123fc19"
dependencies:
JSONStream "^1.0.0"
ajv "4.10.0"
async "~2.1.5"
debug "~2.3.3"
diskusage "^1.1.1"
ioredis "4.9.5"
ipaddr.js "1.2.0"
joi "^10.6"
level "~5.0.1"
level-sublevel "~6.6.5"
node-forge "^0.7.1"
simple-glob "^0.1"
socket.io "~1.7.3"
socket.io-client "~1.7.3"
utf8 "2.1.2"
uuid "^3.0.1"
werelogs scality/werelogs#0ff7ec82
xml2js "~0.4.16"
optionalDependencies:
ioctl "2.0.0"
asn1@~0.2.3:
version "0.2.4"
resolved "https://registry.yarnpkg.com/asn1/-/asn1-0.2.4.tgz#8d2475dfab553bb33e77b54e59e880bb8ce23136"
@ -602,6 +676,11 @@ base64-js@^1.0.2, base64-js@^1.3.1:
resolved "https://registry.yarnpkg.com/base64-js/-/base64-js-1.5.1.tgz#1b1b440160a5bf7ad40b650f095963481903930a"
integrity sha512-AKpaYlHn8t4SVbOHCy+b5+KKgvR4vrsD8vbvrbiQJps7fKDTkjkDry6ji0rUJjC0kzbNePLwzxq8iypo41qeWA==
base64id@1.0.0:
version "1.0.0"
resolved "https://registry.yarnpkg.com/base64id/-/base64id-1.0.0.tgz#47688cb99bb6804f0e06d3e763b1c32e57d8e6b6"
integrity sha1-R2iMuZu2gE8OBtPnY7HDLlfY5rY=
base64id@2.0.0:
version "2.0.0"
resolved "https://registry.yarnpkg.com/base64id/-/base64id-2.0.0.tgz#2770ac6bc47d312af97a8bf9a634342e0cd25cb6"
@ -648,6 +727,11 @@ bl@~0.8.1:
dependencies:
readable-stream "~1.0.26"
blob@0.0.4:
version "0.0.4"
resolved "https://registry.yarnpkg.com/blob/-/blob-0.0.4.tgz#bcf13052ca54463f30f9fc7e95b9a47630a94921"
integrity sha1-vPEwUspURj8w+fx+lbmkdjCpSSE=
blob@0.0.5:
version "0.0.5"
resolved "https://registry.yarnpkg.com/blob/-/blob-0.0.5.tgz#d680eeef25f8cd91ad533f5b01eed48e64caf683"
@ -881,6 +965,11 @@ component-bind@1.0.0:
resolved "https://registry.yarnpkg.com/component-bind/-/component-bind-1.0.0.tgz#00c608ab7dcd93897c0009651b1d3a8e1e73bbd1"
integrity sha1-AMYIq33Nk4l8AAllGx06jh5zu9E=
component-emitter@1.1.2:
version "1.1.2"
resolved "https://registry.yarnpkg.com/component-emitter/-/component-emitter-1.1.2.tgz#296594f2753daa63996d2af08d15a95116c9aec3"
integrity sha1-KWWU8nU9qmOZbSrwjRWpURbJrsM=
component-emitter@1.2.1:
version "1.2.1"
resolved "https://registry.yarnpkg.com/component-emitter/-/component-emitter-1.2.1.tgz#137918d6d78283f7df7a6b7c5a63e140e69425e6"
@ -982,6 +1071,13 @@ debug@2.2.0:
dependencies:
ms "0.7.1"
debug@2.3.3, debug@~2.3.3:
version "2.3.3"
resolved "https://registry.yarnpkg.com/debug/-/debug-2.3.3.tgz#40c453e67e6e13c901ddec317af8986cda9eff8c"
integrity sha1-QMRT5n5uE8kB3ewxeviYbNqe/4w=
dependencies:
ms "0.7.2"
debug@=3.1.0, debug@~3.1.0:
version "3.1.0"
resolved "https://registry.yarnpkg.com/debug/-/debug-3.1.0.tgz#5bb5a0672628b64149566ba16819e61518c67261"
@ -1085,6 +1181,11 @@ diff@1.4.0:
resolved "https://registry.yarnpkg.com/diff/-/diff-1.4.0.tgz#7f28d2eb9ee7b15a97efd89ce63dcfdaa3ccbabf"
integrity sha1-fyjS657nsVqX79ic5j3P2qPMur8=
diff@^5.0.0:
version "5.0.0"
resolved "https://registry.yarnpkg.com/diff/-/diff-5.0.0.tgz#7ed6ad76d859d030787ec35855f5b1daf31d852b"
integrity sha512-/VTCrvm5Z0JGty/BWHljh+BAiw3IK+2j87NGMu8Nwc/f48WoDAC395uomO9ZD117ZOBaHmkX1oyLvkVM/aIT3w==
diskusage@1.1.3, diskusage@^1.1.1:
version "1.1.3"
resolved "https://registry.yarnpkg.com/diskusage/-/diskusage-1.1.3.tgz#680d7dbf1b679168a195c9240eb3552cbd2c067b"
@ -1126,6 +1227,24 @@ encoding-down@^6.3.0:
level-codec "^9.0.0"
level-errors "^2.0.0"
engine.io-client@~1.8.4:
version "1.8.6"
resolved "https://registry.yarnpkg.com/engine.io-client/-/engine.io-client-1.8.6.tgz#d86967c488019524adf2265dba62b886994bd5fd"
integrity sha512-6+rInQu8xU7c0fIF6RC4SRKuHVWPt8Xq0bZYS4lMrTwmhRineOlEMsU3X0zS5mHIvCgJsmpOKEX7DhihGk7j0g==
dependencies:
component-emitter "1.2.1"
component-inherit "0.0.3"
debug "2.3.3"
engine.io-parser "1.3.2"
has-cors "1.1.0"
indexof "0.0.1"
parsejson "0.0.3"
parseqs "0.0.5"
parseuri "0.0.5"
ws "~1.1.5"
xmlhttprequest-ssl "1.6.3"
yeast "0.1.2"
engine.io-client@~3.4.0:
version "3.4.4"
resolved "https://registry.yarnpkg.com/engine.io-client/-/engine.io-client-3.4.4.tgz#77d8003f502b0782dd792b073a4d2cf7ca5ab967"
@ -1143,6 +1262,18 @@ engine.io-client@~3.4.0:
xmlhttprequest-ssl "~1.5.4"
yeast "0.1.2"
engine.io-parser@1.3.2:
version "1.3.2"
resolved "https://registry.yarnpkg.com/engine.io-parser/-/engine.io-parser-1.3.2.tgz#937b079f0007d0893ec56d46cb220b8cb435220a"
integrity sha1-k3sHnwAH0Ik+xW1GyyILjLQ1Igo=
dependencies:
after "0.8.2"
arraybuffer.slice "0.0.6"
base64-arraybuffer "0.1.5"
blob "0.0.4"
has-binary "0.1.7"
wtf-8 "1.0.0"
engine.io-parser@~2.2.0:
version "2.2.1"
resolved "https://registry.yarnpkg.com/engine.io-parser/-/engine.io-parser-2.2.1.tgz#57ce5611d9370ee94f99641b589f94c97e4f5da7"
@ -1154,6 +1285,18 @@ engine.io-parser@~2.2.0:
blob "0.0.5"
has-binary2 "~1.0.2"
engine.io@~1.8.4:
version "1.8.5"
resolved "https://registry.yarnpkg.com/engine.io/-/engine.io-1.8.5.tgz#4ebe5e75c6dc123dee4afdce6e5fdced21eb93f6"
integrity sha512-j1DWIcktw4hRwrv6nWx++5nFH2X64x16MAG2P0Lmi5Dvdfi3I+Jhc7JKJIdAmDJa+5aZ/imHV7dWRPy2Cqjh3A==
dependencies:
accepts "1.3.3"
base64id "1.0.0"
cookie "0.3.1"
debug "2.3.3"
engine.io-parser "1.3.2"
ws "~1.1.5"
engine.io@~3.4.0:
version "3.4.2"
resolved "https://registry.yarnpkg.com/engine.io/-/engine.io-3.4.2.tgz#8fc84ee00388e3e228645e0a7d3dfaeed5bd122c"
@ -1333,9 +1476,9 @@ eslint-config-airbnb@^6.0.0:
resolved "https://registry.yarnpkg.com/eslint-config-airbnb/-/eslint-config-airbnb-6.2.0.tgz#4a28196aa4617de01b8c914e992a82e5d0886a6e"
integrity sha1-SigZaqRhfeAbjJFOmSqC5dCIam4=
eslint-config-scality@scality/Guidelines#71a059ad:
eslint-config-scality@scality/Guidelines#7.4.10.1:
version "1.1.0"
resolved "https://codeload.github.com/scality/Guidelines/tar.gz/71a059ad3fa0598d5bbb923badda58ccf06cc8a6"
resolved "https://codeload.github.com/scality/Guidelines/tar.gz/28879281ca8555daa4d890889f731e7064c15884"
dependencies:
commander "1.3.2"
markdownlint "0.0.8"
@ -1485,7 +1628,6 @@ fast-levenshtein@~2.0.6:
dependencies:
bindings "^1.1.1"
nan "^2.3.2"
node-gyp "^8.0.0"
figures@^1.3.5:
version "1.7.0"
@ -1619,7 +1761,7 @@ getpass@^0.1.1:
dependencies:
assert-plus "^1.0.0"
glob@3.2.11:
glob@3.2.11, glob@~3.2.8:
version "3.2.11"
resolved "https://registry.yarnpkg.com/glob/-/glob-3.2.11.tgz#4a973f635b9190f715d10987d5c00fd2815ebe3d"
integrity sha1-Spc/Y1uRkPcV0QmH1cAP0oFevj0=
@ -1746,6 +1888,13 @@ has-binary2@~1.0.2:
dependencies:
isarray "2.0.1"
has-binary@0.1.7:
version "0.1.7"
resolved "https://registry.yarnpkg.com/has-binary/-/has-binary-0.1.7.tgz#68e61eb16210c9545a0a5cce06a873912fe1e68c"
integrity sha1-aOYesWIQyVRaClzOBqhzkS/h5ow=
dependencies:
isarray "0.0.1"
has-cors@1.1.0:
version "1.1.0"
resolved "https://registry.yarnpkg.com/has-cors/-/has-cors-1.1.0.tgz#5e474793f7ea9843d1bb99c23eef49ff126fff39"
@ -1761,6 +1910,11 @@ has-flag@^3.0.0:
resolved "https://registry.yarnpkg.com/has-flag/-/has-flag-3.0.0.tgz#b5d454dc2199ae225699f3467e5a07f3b955bafd"
integrity sha1-tdRU3CGZriJWmfNGfloH87lVuv0=
has-flag@^4.0.0:
version "4.0.0"
resolved "https://registry.yarnpkg.com/has-flag/-/has-flag-4.0.0.tgz#944771fd9c81c81265c4d6941860da06bb59479b"
integrity sha512-EykJT/Q1KjTWctppgIAgfSO0tKVuZUjhgMr17kqTumMl6Afv3EISleU7qZUzoXDFTAHTDC4NOoG/ZxU3EvlMPQ==
has-symbols@^1.0.1:
version "1.0.1"
resolved "https://registry.yarnpkg.com/has-symbols/-/has-symbols-1.0.1.tgz#9f5214758a44196c406d9bd76cebf81ec2dd31e8"
@ -1788,6 +1942,11 @@ hdclient@scality/hdclient#5145e04e5ed33e85106765b1caa90cd245ef482b:
dependencies:
werelogs scality/werelogs#GA7.2.0.5
hoek@4.x.x:
version "4.2.1"
resolved "https://registry.yarnpkg.com/hoek/-/hoek-4.2.1.tgz#9634502aa12c445dd5a7c5734b572bb8738aacbb"
integrity sha512-QLg82fGkfnJ/4iy1xZ81/9SIJiq1NGFUMGs6ParyjBZr6jW2Ufj/snDqTHixNlHdPNwN2RLVD0Pi3igeK9+JfA==
hosted-git-info@^2.1.4:
version "2.8.8"
resolved "https://registry.yarnpkg.com/hosted-git-info/-/hosted-git-info-2.8.8.tgz#7539bd4bc1e0e0a895815a2e0262420b12858488"
@ -1947,6 +2106,11 @@ ioredis@^4.9.5:
redis-parser "^3.0.0"
standard-as-callback "^2.0.1"
ipaddr.js@1.2.0:
version "1.2.0"
resolved "https://registry.yarnpkg.com/ipaddr.js/-/ipaddr.js-1.2.0.tgz#8aba49c9192799585bdd643e0ccb50e8ae777ba4"
integrity sha1-irpJyRknmVhb3WQ+DMtQ6K53e6Q=
ipaddr.js@1.9.1:
version "1.9.1"
resolved "https://registry.yarnpkg.com/ipaddr.js/-/ipaddr.js-1.9.1.tgz#bff38543eeb8984825079ff3a2a8e6cbd46781b3"
@ -2078,6 +2242,11 @@ isarray@^1.0.0, isarray@~1.0.0:
resolved "https://registry.yarnpkg.com/isarray/-/isarray-1.0.0.tgz#bb935d48582cba168c06834957a54a3e07124f11"
integrity sha1-u5NdSFgsuhaMBoNJV6VKPgcSTxE=
isemail@2.x.x:
version "2.2.1"
resolved "https://registry.yarnpkg.com/isemail/-/isemail-2.2.1.tgz#0353d3d9a62951080c262c2aa0a42b8ea8e9e2a6"
integrity sha1-A1PT2aYpUQgMJiwqoKQrjqjp4qY=
isexe@^2.0.0:
version "2.0.0"
resolved "https://registry.yarnpkg.com/isexe/-/isexe-2.0.0.tgz#e8fbf374dc556ff8947a10dcb0572d633f2cfa10"
@ -2189,6 +2358,11 @@ istanbul@1.0.0-alpha.2:
which "^1.1.1"
wordwrap "^1.0.0"
items@2.x.x:
version "2.1.2"
resolved "https://registry.yarnpkg.com/items/-/items-2.1.2.tgz#0849354595805d586dac98e7e6e85556ea838558"
integrity sha512-kezcEqgB97BGeZZYtX/MA8AG410ptURstvnz5RAgyFZ8wQFPMxHY8GpTq+/ZHKT3frSlIthUq7EvLt9xn3TvXg==
jade@0.26.3:
version "0.26.3"
resolved "https://registry.yarnpkg.com/jade/-/jade-0.26.3.tgz#8f10d7977d8d79f2f6ff862a81b0513ccb25686c"
@ -2202,6 +2376,16 @@ jmespath@0.15.0:
resolved "https://registry.yarnpkg.com/jmespath/-/jmespath-0.15.0.tgz#a3f222a9aae9f966f5d27c796510e28091764217"
integrity sha1-o/Iiqarp+Wb10nx5ZRDigJF2Qhc=
joi@^10.6:
version "10.6.0"
resolved "https://registry.yarnpkg.com/joi/-/joi-10.6.0.tgz#52587f02d52b8b75cdb0c74f0b164a191a0e1fc2"
integrity sha512-hBF3LcqyAid+9X/pwg+eXjD2QBZI5eXnBFJYaAkH4SK3mp9QSRiiQnDYlmlz5pccMvnLcJRS4whhDOTCkmsAdQ==
dependencies:
hoek "4.x.x"
isemail "2.x.x"
items "2.x.x"
topo "2.x.x"
"js-tokens@^3.0.0 || ^4.0.0":
version "4.0.0"
resolved "https://registry.yarnpkg.com/js-tokens/-/js-tokens-4.0.0.tgz#19203fb59991df98e3a287050d4647cdeaf32499"
@ -2264,6 +2448,11 @@ json-stringify-safe@~5.0.1:
resolved "https://registry.yarnpkg.com/json-stringify-safe/-/json-stringify-safe-5.0.1.tgz#1296a2d58fd45f19a0f6ce01d65701e2c735b6eb"
integrity sha1-Epai1Y/UXxmg9s4B1lcB4sc1tus=
json3@3.3.2:
version "3.3.2"
resolved "https://registry.yarnpkg.com/json3/-/json3-3.3.2.tgz#3c0434743df93e2f5c42aee7b19bcb483575f4e1"
integrity sha1-PAQ0dD35Pi9cQq7nsZvLSDV19OE=
jsonify@~0.0.0:
version "0.0.0"
resolved "https://registry.yarnpkg.com/jsonify/-/jsonify-0.0.0.tgz#2c74b6ee41d93ca51b7b5aaee8f503631d252a73"
@ -2294,6 +2483,11 @@ jsprim@^1.2.2:
json-schema "0.2.3"
verror "1.10.0"
just-extend@^4.0.2:
version "4.2.1"
resolved "https://registry.yarnpkg.com/just-extend/-/just-extend-4.2.1.tgz#ef5e589afb61e5d66b24eca749409a8939a8c744"
integrity sha512-g3UB796vUFIY90VIv/WX3L2c8CS2MdWUww3CNrYmqza1Fg0DURc2K/O4YrnklBdQarSJ/y8JnJYDGc+1iumQjg==
jwa@^1.4.1:
version "1.4.1"
resolved "https://registry.yarnpkg.com/jwa/-/jwa-1.4.1.tgz#743c32985cb9e98655530d53641b66c8645b039a"
@ -2479,6 +2673,11 @@ lodash.flatten@^4.4.0:
resolved "https://registry.yarnpkg.com/lodash.flatten/-/lodash.flatten-4.4.0.tgz#f31c22225a9632d2bbf8e4addbef240aa765a61f"
integrity sha1-8xwiIlqWMtK7+OSt2+8kCqdlph8=
lodash.get@^4.4.2:
version "4.4.2"
resolved "https://registry.yarnpkg.com/lodash.get/-/lodash.get-4.4.2.tgz#2d177f652fa31e939b4438d5341499dfa3825e99"
integrity sha1-LRd/ZS+jHpObRDjVNBSZ36OCXpk=
lodash.isstring@^4.0.1:
version "4.0.1"
resolved "https://registry.yarnpkg.com/lodash.isstring/-/lodash.isstring-4.0.1.tgz#d527dfb5456eca7cc9bb95d5daeaf88ba54a5451"
@ -2494,6 +2693,11 @@ lodash@^4.0.0, lodash@^4.14.0, lodash@^4.17.14, lodash@^4.17.15, lodash@^4.17.4,
resolved "https://registry.yarnpkg.com/lodash/-/lodash-4.17.20.tgz#b44a9b6297bcb698f1c51a3545a2b3b368d59c52"
integrity sha512-PlhdFcillOINfeV7Ni6oF1TAEayyZBoZ8bcshTHqOYJYlrqzRK5hagpagky5o4HfCzzd1TRkXPMFq6cKk9rGmA==
lodash@~2.4.1:
version "2.4.2"
resolved "https://registry.yarnpkg.com/lodash/-/lodash-2.4.2.tgz#fadd834b9683073da179b3eae6d9c0d15053f73e"
integrity sha1-+t2DS5aDBz2hebPq5tnA0VBT9z4=
lolex@^1.4.0:
version "1.6.0"
resolved "https://registry.yarnpkg.com/lolex/-/lolex-1.6.0.tgz#3a9a0283452a47d7439e72731b9e07d7386e49f6"
@ -2609,6 +2813,11 @@ mime-db@1.44.0:
resolved "https://registry.yarnpkg.com/mime-db/-/mime-db-1.44.0.tgz#fa11c5eb0aca1334b4233cb4d52f10c5a6272f92"
integrity sha512-/NOTfLrsPBVeH7YtFPgsVWveuL+4SjjYxaQ1xtM1KMFj7HdxlBlxeyNLzhyJVx7r4rZGJAZ/6lkKCitSc/Nmpg==
mime-db@1.51.0:
version "1.51.0"
resolved "https://registry.yarnpkg.com/mime-db/-/mime-db-1.51.0.tgz#d9ff62451859b18342d960850dc3cfb77e63fb0c"
integrity sha512-5y8A56jg7XVQx2mbv1lu49NR4dokRnhZYTtL+KGfaa27uq4pSTXkwQkFJl4pkRMyNFz/EtYDSkiiEHx3F7UN6g==
mime-types@^2.1.12, mime-types@~2.1.19, mime-types@~2.1.24:
version "2.1.27"
resolved "https://registry.yarnpkg.com/mime-types/-/mime-types-2.1.27.tgz#47949f98e279ea53119f5722e0f34e529bec009f"
@ -2616,6 +2825,13 @@ mime-types@^2.1.12, mime-types@~2.1.19, mime-types@~2.1.24:
dependencies:
mime-db "1.44.0"
mime-types@~2.1.11:
version "2.1.34"
resolved "https://registry.yarnpkg.com/mime-types/-/mime-types-2.1.34.tgz#5a712f9ec1503511a945803640fafe09d3793c24"
integrity sha512-6cP692WwGIs9XXdOO4++N+7qjqv0rqxxVvJ3VHPh/Sc9mVZcQP+ZGhkKiTvWMQRr2tbHkJP/Yn7Y0npb3ZBs4A==
dependencies:
mime-db "1.51.0"
mime@^1.3.4:
version "1.6.0"
resolved "https://registry.yarnpkg.com/mime/-/mime-1.6.0.tgz#32cd9e5c64553bd58d19a568af452acff04981b1"
@ -2648,6 +2864,14 @@ minimatch@2.x:
dependencies:
brace-expansion "^1.0.0"
minimatch@~0.2.14:
version "0.2.14"
resolved "https://registry.yarnpkg.com/minimatch/-/minimatch-0.2.14.tgz#c74e780574f63c6f9a090e90efbe6ef53a6a756a"
integrity sha1-x054BXT2PG+aCQ6Q775u9TpqdWo=
dependencies:
lru-cache "2"
sigmund "~1.0.0"
minimist@0.0.8:
version "0.0.8"
resolved "https://registry.yarnpkg.com/minimist/-/minimist-0.0.8.tgz#857fcabfc3397d2625b8228262e86aa7a011b05d"
@ -2742,6 +2966,11 @@ ms@0.7.1:
resolved "https://registry.yarnpkg.com/ms/-/ms-0.7.1.tgz#9cd13c03adbff25b65effde7ce864ee952017098"
integrity sha1-nNE8A62/8ltl7/3nzoZO6VIBcJg=
ms@0.7.2:
version "0.7.2"
resolved "https://registry.yarnpkg.com/ms/-/ms-0.7.2.tgz#ae25cf2512b3885a1d95d7f037868d8431124765"
integrity sha1-riXPJRKziFodldfwN4aNhDESR2U=
ms@2.0.0:
version "2.0.0"
resolved "https://registry.yarnpkg.com/ms/-/ms-2.0.0.tgz#5608aeadfc00be6c2901df5f9861788de0d597c8"
@ -2772,6 +3001,11 @@ napi-macros@~2.0.0:
resolved "https://registry.yarnpkg.com/napi-macros/-/napi-macros-2.0.0.tgz#2b6bae421e7b96eb687aa6c77a7858640670001b"
integrity sha512-A0xLykHtARfueITVDernsAWdtIMbOJgKgcluwENp3AlsKN/PloyO10HtmoqnFAQAcxPkgZN7wdfPfEd0zNGxbg==
negotiator@0.6.1:
version "0.6.1"
resolved "https://registry.yarnpkg.com/negotiator/-/negotiator-0.6.1.tgz#2b327184e8992101177b28563fb5e7102acd0ca9"
integrity sha1-KzJxhOiZIQEXeyhWP7XnECrNDKk=
negotiator@0.6.2:
version "0.6.2"
resolved "https://registry.yarnpkg.com/negotiator/-/negotiator-0.6.2.tgz#feacf7ccf525a77ae9634436a64883ffeca346fb"
@ -2792,6 +3026,17 @@ nice-try@^1.0.4:
resolved "https://registry.yarnpkg.com/nice-try/-/nice-try-1.0.5.tgz#a3378a7696ce7d223e88fc9b764bd7ef1089e366"
integrity sha512-1nh45deeb5olNY7eX82BkPO7SSxR5SSYJiPTrTdFUVYwAl8CKMA5N9PjTYkHiRjisVcxcQ1HXdLhx2qxxJzLNQ==
nise@^5.1.1:
version "5.1.1"
resolved "https://registry.yarnpkg.com/nise/-/nise-5.1.1.tgz#ac4237e0d785ecfcb83e20f389185975da5c31f3"
integrity sha512-yr5kW2THW1AkxVmCnKEh4nbYkJdB3I7LUkiUgOvEkOp414mc2UMaHMA7pjq1nYowhdoJZGwEKGaQVbxfpWj10A==
dependencies:
"@sinonjs/commons" "^1.8.3"
"@sinonjs/fake-timers" ">=5"
"@sinonjs/text-encoding" "^0.7.1"
just-extend "^4.0.2"
path-to-regexp "^1.7.0"
node-fetch@^2.3.0:
version "2.6.1"
resolved "https://registry.yarnpkg.com/node-fetch/-/node-fetch-2.6.1.tgz#045bd323631f76ed2e2b55573394416b639a0052"
@ -2871,6 +3116,11 @@ oauth-sign@~0.9.0:
resolved "https://registry.yarnpkg.com/oauth-sign/-/oauth-sign-0.9.0.tgz#47a7b016baa68b5fa0ecf3dee08a85c679ac6455"
integrity sha512-fexhUFFPTGV8ybAtSIGbV6gOkSv8UtRbDBnAyLQw4QPKkgNlsH2ByPGtMUqdWkos6YCRmAqViwgZrJc/mRDzZQ==
object-assign@4.1.0:
version "4.1.0"
resolved "https://registry.yarnpkg.com/object-assign/-/object-assign-4.1.0.tgz#7a3b3d0e98063d43f4c03f2e8ae6cd51a86883a0"
integrity sha1-ejs9DpgGPUP0wD8uiubNUahog6A=
object-assign@^4.0.1, object-assign@^4.1.0:
version "4.1.1"
resolved "https://registry.yarnpkg.com/object-assign/-/object-assign-4.1.1.tgz#2109adc7965887cfc05cbbd442cac8bfbb360863"
@ -2930,6 +3180,11 @@ optionator@^0.8.1:
type-check "~0.3.2"
word-wrap "~1.2.3"
options@>=0.0.5:
version "0.0.6"
resolved "https://registry.yarnpkg.com/options/-/options-0.0.6.tgz#ec22d312806bb53e731773e7cdaefcf1c643128f"
integrity sha1-7CLTEoBrtT5zF3Pnza788cZDEo8=
os-homedir@^1.0.0:
version "1.0.2"
resolved "https://registry.yarnpkg.com/os-homedir/-/os-homedir-1.0.2.tgz#ffbc4988336e0e833de0c168c7ef152121aa7fb3"
@ -2948,6 +3203,13 @@ parse-json@^4.0.0:
error-ex "^1.3.1"
json-parse-better-errors "^1.0.1"
parsejson@0.0.3:
version "0.0.3"
resolved "https://registry.yarnpkg.com/parsejson/-/parsejson-0.0.3.tgz#ab7e3759f209ece99437973f7d0f1f64ae0e64ab"
integrity sha1-q343WfIJ7OmUN5c/fQ8fZK4OZKs=
dependencies:
better-assert "~1.0.0"
parseqs@0.0.5:
version "0.0.5"
resolved "https://registry.yarnpkg.com/parseqs/-/parseqs-0.0.5.tgz#d5208a3738e46766e291ba2ea173684921a8b89d"
@ -2992,6 +3254,13 @@ path-parse@^1.0.5, path-parse@^1.0.6:
resolved "https://registry.yarnpkg.com/path-parse/-/path-parse-1.0.6.tgz#d62dbb5679405d72c4737ec58600e9ddcf06d24c"
integrity sha512-GSmOT2EbHrINBf9SR7CDELwlJ8AENk3Qn7OikK4nFYAu3Ote2+JYNVvkpAEQm3/TLNEJFD/xZJjzyxg3KBWOzw==
path-to-regexp@^1.7.0:
version "1.8.0"
resolved "https://registry.yarnpkg.com/path-to-regexp/-/path-to-regexp-1.8.0.tgz#887b3ba9d84393e87a0a0b9f4cb756198b53548a"
integrity sha512-n43JRhlUKUAlibEJhPeir1ncUID16QnEjNpwzNdO3Lm4ywrBpBZ5oLD0I6br9evr1Y9JTqwRtAh7JLoOzAQdVA==
dependencies:
isarray "0.0.1"
path-type@^3.0.0:
version "3.0.0"
resolved "https://registry.yarnpkg.com/path-type/-/path-type-3.0.0.tgz#cef31dc8e0a1a3bb0d105c0cd97cf3bf47f4e36f"
@ -3423,6 +3692,15 @@ sigmund@~1.0.0:
resolved "https://registry.yarnpkg.com/sigmund/-/sigmund-1.0.1.tgz#3ff21f198cad2175f9f3b781853fd94d0d19b590"
integrity sha1-P/IfGYytIXX587eBhT/ZTQ0ZtZA=
simple-glob@^0.1:
version "0.1.1"
resolved "https://registry.yarnpkg.com/simple-glob/-/simple-glob-0.1.1.tgz#282bfa012d7206643df61d34c6bb9e4ce3fd7714"
integrity sha1-KCv6AS1yBmQ99h00xrueTOP9dxQ=
dependencies:
glob "~3.2.8"
lodash "~2.4.1"
minimatch "~0.2.14"
simple-glob@^0.2, simple-glob@^0.2.0:
version "0.2.0"
resolved "https://registry.yarnpkg.com/simple-glob/-/simple-glob-0.2.0.tgz#95cf6a5fb5d84843a52a58529cba31b0f5c3478c"
@ -3433,16 +3711,53 @@ simple-glob@^0.2, simple-glob@^0.2.0:
lodash.flatten "^4.4.0"
lodash.union "^4.6.0"
sinon@^13.0.1:
version "13.0.1"
resolved "https://registry.yarnpkg.com/sinon/-/sinon-13.0.1.tgz#2a568beca2084c48985dd98e276e065c81738e3c"
integrity sha512-8yx2wIvkBjIq/MGY1D9h1LMraYW+z1X0mb648KZnKSdvLasvDu7maa0dFaNYdTDczFgbjNw2tOmWdTk9saVfwQ==
dependencies:
"@sinonjs/commons" "^1.8.3"
"@sinonjs/fake-timers" "^9.0.0"
"@sinonjs/samsam" "^6.1.1"
diff "^5.0.0"
nise "^5.1.1"
supports-color "^7.2.0"
slice-ansi@0.0.4:
version "0.0.4"
resolved "https://registry.yarnpkg.com/slice-ansi/-/slice-ansi-0.0.4.tgz#edbf8903f66f7ce2f8eafd6ceed65e264c831b35"
integrity sha1-7b+JA/ZvfOL46v1s7tZeJkyDGzU=
socket.io-adapter@0.5.0:
version "0.5.0"
resolved "https://registry.yarnpkg.com/socket.io-adapter/-/socket.io-adapter-0.5.0.tgz#cb6d4bb8bec81e1078b99677f9ced0046066bb8b"
integrity sha1-y21LuL7IHhB4uZZ3+c7QBGBmu4s=
dependencies:
debug "2.3.3"
socket.io-parser "2.3.1"
socket.io-adapter@~1.1.0:
version "1.1.2"
resolved "https://registry.yarnpkg.com/socket.io-adapter/-/socket.io-adapter-1.1.2.tgz#ab3f0d6f66b8fc7fca3959ab5991f82221789be9"
integrity sha512-WzZRUj1kUjrTIrUKpZLEzFZ1OLj5FwLlAFQs9kuZJzJi5DKdU7FsWc36SNmA8iDOtwBQyT8FkrriRM8vXLYz8g==
socket.io-client@1.7.4, socket.io-client@~1.7.3:
version "1.7.4"
resolved "https://registry.yarnpkg.com/socket.io-client/-/socket.io-client-1.7.4.tgz#ec9f820356ed99ef6d357f0756d648717bdd4281"
integrity sha1-7J+CA1btme9tNX8HVtZIcXvdQoE=
dependencies:
backo2 "1.0.2"
component-bind "1.0.0"
component-emitter "1.2.1"
debug "2.3.3"
engine.io-client "~1.8.4"
has-binary "0.1.7"
indexof "0.0.1"
object-component "0.0.3"
parseuri "0.0.5"
socket.io-parser "2.3.1"
to-array "0.1.4"
socket.io-client@2.3.0:
version "2.3.0"
resolved "https://registry.yarnpkg.com/socket.io-client/-/socket.io-client-2.3.0.tgz#14d5ba2e00b9bcd145ae443ab96b3f86cbcc1bb4"
@ -3480,6 +3795,16 @@ socket.io-client@~2.3.0:
socket.io-parser "~3.3.0"
to-array "0.1.4"
socket.io-parser@2.3.1:
version "2.3.1"
resolved "https://registry.yarnpkg.com/socket.io-parser/-/socket.io-parser-2.3.1.tgz#dd532025103ce429697326befd64005fcfe5b4a0"
integrity sha1-3VMgJRA85Clpcya+/WQAX8/ltKA=
dependencies:
component-emitter "1.1.2"
debug "2.2.0"
isarray "0.0.1"
json3 "3.3.2"
socket.io-parser@~3.3.0:
version "3.3.1"
resolved "https://registry.yarnpkg.com/socket.io-parser/-/socket.io-parser-3.3.1.tgz#f07d9c8cb3fb92633aa93e76d98fd3a334623199"
@ -3498,6 +3823,19 @@ socket.io-parser@~3.4.0:
debug "~4.1.0"
isarray "2.0.1"
socket.io@~1.7.3:
version "1.7.4"
resolved "https://registry.yarnpkg.com/socket.io/-/socket.io-1.7.4.tgz#2f7ecedc3391bf2d5c73e291fe233e6e34d4dd00"
integrity sha1-L37O3DORvy1cc+KR/iM+bjTU3QA=
dependencies:
debug "2.3.3"
engine.io "~1.8.4"
has-binary "0.1.7"
object-assign "4.1.0"
socket.io-adapter "0.5.0"
socket.io-client "1.7.4"
socket.io-parser "2.3.1"
socket.io@~2.3.0:
version "2.3.0"
resolved "https://registry.yarnpkg.com/socket.io/-/socket.io-2.3.0.tgz#cd762ed6a4faeca59bc1f3e243c0969311eb73fb"
@ -3563,7 +3901,7 @@ sprintf-js@~1.0.2:
resolved "https://codeload.github.com/scality/sproxydclient/tar.gz/30e7115668bc7e10b4ec3cfdbaa7a124cdc21cc5"
dependencies:
async "^3.1.0"
werelogs scality/werelogs#351a2a3
werelogs scality/werelogs#8.1.0
sproxydclient@scality/sproxydclient#8.0.2:
version "8.0.2"
@ -3715,6 +4053,13 @@ supports-color@^5.3.0:
dependencies:
has-flag "^3.0.0"
supports-color@^7.2.0:
version "7.2.0"
resolved "https://registry.yarnpkg.com/supports-color/-/supports-color-7.2.0.tgz#1b7dcdcb32b8138801b3e478ba6a51caa89648da"
integrity sha512-qpCAvRl9stuOHveKsn7HncJRvv501qIacKzQlO/+Lwxc9+0q2wLyv4Dfvt80/DPn2pqOBsJdDiogXGR9+OvwRw==
dependencies:
has-flag "^4.0.0"
table@^3.7.8:
version "3.8.3"
resolved "https://registry.yarnpkg.com/table/-/table-3.8.3.tgz#2bbc542f0fda9861a755d3947fefd8b3f513855f"
@ -3759,6 +4104,13 @@ to-iso-string@0.0.2:
resolved "https://registry.yarnpkg.com/to-iso-string/-/to-iso-string-0.0.2.tgz#4dc19e664dfccbe25bd8db508b00c6da158255d1"
integrity sha1-TcGeZk38y+Jb2NtQiwDG2hWCVdE=
topo@2.x.x:
version "2.0.2"
resolved "https://registry.yarnpkg.com/topo/-/topo-2.0.2.tgz#cd5615752539057c0dc0491a621c3bc6fbe1d182"
integrity sha1-zVYVdSU5BXwNwEkaYhw7xvvh0YI=
dependencies:
hoek "4.x.x"
tough-cookie@~2.5.0:
version "2.5.0"
resolved "https://registry.yarnpkg.com/tough-cookie/-/tough-cookie-2.5.0.tgz#cd9fb2a0aa1d5a12b473bd9fb96fa3dcff65ade2"
@ -3796,6 +4148,11 @@ type-check@~0.3.2:
dependencies:
prelude-ls "~1.1.2"
type-detect@4.0.8, type-detect@^4.0.8:
version "4.0.8"
resolved "https://registry.yarnpkg.com/type-detect/-/type-detect-4.0.8.tgz#7646fb5f18871cfbb7749e69bd39a6388eb7450c"
integrity sha512-0fr/mIH1dlO+x7TlcMy+bIDqKPsw/70tVyeHW787goQjhmqaZe10uwLujubK9q9Lg6Fiho1KUKDYz0Z7k7g5/g==
type-is@^1.6.4:
version "1.6.18"
resolved "https://registry.yarnpkg.com/type-is/-/type-is-1.6.18.tgz#4e552cd05df09467dcbc4ef739de89f2cf37c131"
@ -3853,6 +4210,11 @@ uglify-js@^3.1.4:
resolved "https://registry.yarnpkg.com/uglify-js/-/uglify-js-3.11.5.tgz#d6788bc83cf35ff18ea78a65763e480803409bc6"
integrity sha512-btvv/baMqe7HxP7zJSF7Uc16h1mSfuuSplT0/qdjxseesDU+yYzH33eHBH+eMdeRXwujXspaCTooWHQVVBh09w==
ultron@1.0.x:
version "1.0.2"
resolved "https://registry.yarnpkg.com/ultron/-/ultron-1.0.2.tgz#ace116ab557cd197386a4e88f4685378c8b2e4fa"
integrity sha1-rOEWq1V80Zc4ak6I9GhTeMiy5Po=
underscore@~1.8.3:
version "1.8.3"
resolved "https://registry.yarnpkg.com/underscore/-/underscore-1.8.3.tgz#4f3fb53b106e6097fcf9cb4109f2a5e9bdfa5022"
@ -3980,6 +4342,12 @@ werelogs@scality/werelogs#351a2a3:
dependencies:
safe-json-stringify "1.0.3"
werelogs@scality/werelogs#4e0d97c:
version "7.4.1"
resolved "https://codeload.github.com/scality/werelogs/tar.gz/4e0d97cf69ea7ed60bea90756278513e7e7ea9b1"
dependencies:
safe-json-stringify "1.0.3"
werelogs@scality/werelogs#8.1.0:
version "8.1.0"
resolved "https://codeload.github.com/scality/werelogs/tar.gz/e8f828725642c54c511cdbe580b18f43d3589313"
@ -4026,6 +4394,14 @@ ws@^7.1.2:
resolved "https://registry.yarnpkg.com/ws/-/ws-7.4.0.tgz#a5dd76a24197940d4a8bb9e0e152bb4503764da7"
integrity sha512-kyFwXuV/5ymf+IXhS6f0+eAFvydbaBW3zjpT6hUdAh/hbVjTIB5EHBGi0bPoCLSK2wcuz3BrEkB9LrYv1Nm4NQ==
ws@~1.1.5:
version "1.1.5"
resolved "https://registry.yarnpkg.com/ws/-/ws-1.1.5.tgz#cbd9e6e75e09fc5d2c90015f21f0c40875e0dd51"
integrity sha512-o3KqipXNUdS7wpQzBHSe180lBGO60SoK0yVo3CYJgb2MkobuWuBX6dhkYP5ORCLd55y+SaflMOV5fqAB53ux4w==
dependencies:
options ">=0.0.5"
ultron "1.0.x"
ws@~6.1.0:
version "6.1.4"
resolved "https://registry.yarnpkg.com/ws/-/ws-6.1.4.tgz#5b5c8800afab925e94ccb29d153c8d02c1776ef9"
@ -4033,6 +4409,11 @@ ws@~6.1.0:
dependencies:
async-limiter "~1.0.0"
wtf-8@1.0.0:
version "1.0.0"
resolved "https://registry.yarnpkg.com/wtf-8/-/wtf-8-1.0.0.tgz#392d8ba2d0f1c34d1ee2d630f15d0efb68e1048a"
integrity sha1-OS2LotDxw00e4tYw8V0O+2jhBIo=
xml2js@0.2.8:
version "0.2.8"
resolved "https://registry.yarnpkg.com/xml2js/-/xml2js-0.2.8.tgz#9b81690931631ff09d1957549faf54f4f980b3c2"
@ -4086,6 +4467,11 @@ xmlbuilder@~11.0.0:
resolved "https://registry.yarnpkg.com/xmlbuilder/-/xmlbuilder-11.0.1.tgz#be9bae1c8a046e76b31127726347d0ad7002beb3"
integrity sha512-fDlsI/kFEx7gLvbecc0/ohLG50fugQp8ryHzMTuW9vSa1GJ0XYWKnhsUx7oie3G98+r56aTQIUB4kht42R3JvA==
xmlhttprequest-ssl@1.6.3:
version "1.6.3"
resolved "https://registry.yarnpkg.com/xmlhttprequest-ssl/-/xmlhttprequest-ssl-1.6.3.tgz#03b713873b01659dfa2c1c5d056065b27ddc2de6"
integrity sha512-3XfeQE/wNkvrIktn2Kf0869fC0BN6UpydVasGIeSm2B1Llihf7/0UfZM+eCkOw3P7bP4+qPgqhm7ZoxuJtFU0Q==
xmlhttprequest-ssl@~1.5.4:
version "1.5.5"
resolved "https://registry.yarnpkg.com/xmlhttprequest-ssl/-/xmlhttprequest-ssl-1.5.5.tgz#c2876b06168aadc40e57d97e81191ac8f4398b3e"