Compare commits
3 commits: developmen...hotfix/7.7

| Author        | SHA1       | Date |
| ------------- | ---------- | ---- |
| Maha Benzekri | 09a474d3ea |      |
| Maha Benzekri | ec237cf6f3 |      |
| Maha Benzekri | b3d875c938 |      |
ESLint configuration

@@ -1,6 +1 @@
-{
-    "extends": "scality",
-    "parserOptions": {
-        "ecmaVersion": 2020
-    }
-}
+{ "extends": "scality" }
CodeQL workflow (file deleted)

@@ -1,25 +0,0 @@
----
-name: codeQL
-
-on:
-  push:
-    branches: [development/*, stabilization/*, hotfix/*]
-  pull_request:
-    branches: [development/*, stabilization/*, hotfix/*]
-  workflow_dispatch:
-
-jobs:
-  analyze:
-    name: Static analysis with CodeQL
-    runs-on: ubuntu-latest
-    steps:
-      - name: Checkout code
-        uses: actions/checkout@v4
-
-      - name: Initialize CodeQL
-        uses: github/codeql-action/init@v3
-        with:
-          languages: javascript, typescript
-
-      - name: Build and analyze
-        uses: github/codeql-action/analyze@v3
Dependency review workflow (file deleted)

@@ -1,16 +0,0 @@
----
-name: dependency review
-
-on:
-  pull_request:
-    branches: [development/*, stabilization/*, hotfix/*]
-
-jobs:
-  dependency-review:
-    runs-on: ubuntu-latest
-    steps:
-      - name: 'Checkout Repository'
-        uses: actions/checkout@v4
-
-      - name: 'Dependency Review'
-        uses: actions/dependency-review-action@v4
Tests workflow

@@ -25,30 +25,24 @@ jobs:
         - 6379:6379
     steps:
       - name: Checkout
-        uses: actions/checkout@v4
-      - uses: actions/setup-node@v4
+        uses: actions/checkout@v2
+      - uses: actions/setup-node@v2
         with:
           node-version: '16'
           cache: 'yarn'
       - name: install dependencies
-        run: yarn install --frozen-lockfile --prefer-offline --network-concurrency 1
+        run: yarn install --frozen-lockfile --prefer-offline
         continue-on-error: true # TODO ARSN-97 Remove it when no errors in TS
       - name: lint yaml
         run: yarn --silent lint_yml
       - name: lint javascript
-        run: yarn --silent lint --max-warnings 0
+        run: yarn --silent lint -- --max-warnings 0
       - name: lint markdown
         run: yarn --silent lint_md
-      - name: add hostname
-        run: |
-          sudo sh -c "echo '127.0.0.1 testrequestbucket.localhost' >> /etc/hosts"
-      - name: test and coverage
-        run: yarn --silent coverage
+      - name: run unit tests
+        run: yarn test
       - name: run functional tests
         run: yarn ft_test
-      - uses: codecov/codecov-action@v4
-        with:
-          token: ${{ secrets.CODECOV_TOKEN }}
       - name: run executables tests
         run: yarn install && yarn test
         working-directory: 'lib/executables/pensieveCreds/'

@@ -59,9 +53,9 @@ jobs:
     runs-on: ubuntu-latest
     steps:
       - name: Checkout
-        uses: actions/checkout@v4
+        uses: actions/checkout@v2
       - name: Install NodeJS
-        uses: actions/setup-node@v4
+        uses: actions/setup-node@v2
         with:
           node-version: '16'
           cache: yarn

@@ -72,7 +66,7 @@ jobs:
         run: yarn build
         continue-on-error: true # TODO ARSN-97 Remove it when no errors in TS
       - name: Upload artifacts
-        uses: scality/action-artifacts@v4
+        uses: scality/action-artifacts@v2
        with:
          url: https://artifacts.scality.net
          user: ${{ secrets.ARTIFACTS_USER }}
.swcrc (file deleted)

@@ -1,12 +0,0 @@
-{
-    "$schema": "https://swc.rs/schema.json",
-    "jsc": {
-        "parser": {
-            "syntax": "typescript"
-        },
-        "target": "es2017"
-    },
-    "module": {
-        "type": "commonjs"
-    }
-}
README

@@ -1,7 +1,5 @@
 # Arsenal
 
-[![codecov](https://codecov.io/gh/scality/Arsenal/branch/development/8.1/graph/badge.svg?token=X0esXhJSwb)](https://codecov.io/gh/scality/Arsenal)
-
 Common utilities for the S3 project components
 
 Within this repository, you will be able to find the shared libraries for the
Bucket info model version documentation

@@ -85,66 +85,6 @@ Used to store the bucket lifecycle configuration info
 
 ### Properties Added
 
-```javascript
-this._uid = uid || uuid();
-```
-
-### Usage
-
-Used to set a unique identifier on a bucket
-
-## Model version 8
-
-### Properties Added
-
-```javascript
-this._readLocationConstraint = readLocationConstraint || null;
-```
-
-### Usage
-
-Used to store default read location of the bucket
-
-## Model version 9
-
-### Properties Added
-
-```javascript
-this._isNFS = isNFS || null;
-```
-
-### Usage
-
-Used to determine whether the bucket may be accessed through NFS
-
-## Model version 10
-
-### Properties Added
-
-```javascript
-this._ingestion = ingestionConfig || null;
-```
-
-### Usage
-
-Used to store the ingestion status of a bucket
-
-## Model version 11
-
-### Properties Added
-
-```javascript
-this._azureInfo = azureInfo || null;
-```
-
-### Usage
-
-Used to store Azure storage account specific information
-
-## Model version 12
-
-### Properties Added
-
 ```javascript
 this._objectLockEnabled = objectLockEnabled || false;
 this._objectLockConfiguration = objectLockConfiguration || null;

@@ -155,7 +95,7 @@ this._objectLockConfiguration = objectLockConfiguration || null;
 
 Used to determine whether object lock capabilities are enabled on a bucket and
 to store the object lock configuration of the bucket
 
-## Model version 13
+## Model version 8
 
 ### Properties Added
 

@@ -167,7 +107,7 @@ this._notificationConfiguration = notificationConfiguration || null;
 
 Used to store the bucket notification configuration info
 
-## Model version 14
+## Model version 9
 
 ### Properties Added
 

@@ -179,7 +119,19 @@ this._serverSideEncryption.configuredMasterKeyId = configuredMasterKeyId || undefined;
 
 Used to store the users configured KMS key id
 
-## Model version 15
+## Model version 10
+
+### Properties Added
+
+```javascript
+this._uid = uid || uuid();
+```
+
+### Usage
+
+Used to set a unique identifier on a bucket
+
+## Model version 11
 
 ### Properties Added
 

@@ -187,74 +139,6 @@ Used to store the users configured KMS key id
 this._tags = tags || null;
 ```
 
-The Tag Set of a bucket is an array of objects with Key and Value:
-
-```javascript
-[
-    {
-        Key: 'something',
-        Value: 'some_data'
-    }
-]
-```
-
-## Model version 16
-
-### Properties Added
-
-```javascript
-this._capabilities = capabilities || undefined;
-```
-
-For capacity-enabled buckets, contains the following data:
-
-```javascript
-{
-    _capabilities: {
-        VeeamSOSApi?: {
-            SystemInfo?: {
-                ProtocolVersion: String,
-                ModelName: String,
-                ProtocolCapabilities: {
-                    CapacityInfo: Boolean,
-                    UploadSessions: Boolean,
-                    IAMSTS: Boolean,
-                },
-                APIEndpoints: {
-                    IAMEndpoint: String,
-                    STSEndpoint: String,
-                },
-                SystemRecommendations?: {
-                    S3ConcurrentTaskLimit: Number,
-                    S3MultiObjectDelete: Number,
-                    StorageCurrentTasksLimit: Number,
-                    KbBlockSize: Number,
-                }
-                LastModified?: String,
-            },
-            CapacityInfo?: {
-                Capacity: Number,
-                Available: Number,
-                Used: Number,
-                LastModified?: String,
-            },
-        }
-    },
-}
-```
-
 ### Usage
 
 Used to store bucket tagging
-
-## Model version 17
-
-### Properties Added
-
-```javascript
-this._quotaMax = quotaMax || 0;
-```
-
-### Usage
-
-Used to store bucket quota
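Every model version above follows the same constructor pattern: a new optional argument defaulted into a private field. A minimal hedged sketch of that pattern, using only properties quoted in the diff (the class name and parameter handling are illustrative, not the actual BucketInfo implementation):

```javascript
// Illustrative only: mirrors the defaulting pattern quoted above,
// not the real BucketInfo class.
class BucketInfoSketch {
    constructor({ objectLockEnabled, objectLockConfiguration, uid, tags } = {}) {
        // object lock flag and configuration
        this._objectLockEnabled = objectLockEnabled || false;
        this._objectLockConfiguration = objectLockConfiguration || null;
        // model version 10: unique bucket identifier; the real code calls uuid()
        this._uid = uid || 'generated-uuid';
        // model version 11: Tag Set, an array of { Key, Value } objects
        this._tags = tags || null;
    }
}
```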
Delimiter listing documentation (file deleted)

@@ -1,27 +0,0 @@
-# Delimiter
-
-The Delimiter class handles raw listings from the database with an
-optional delimiter, and fills in a curated listing with "Contents" and
-"CommonPrefixes" as a result.
-
-## Expected Behavior
-
-- only lists keys belonging to the given **prefix** (if provided)
-
-- groups listed keys that have a common prefix ending with a delimiter
-  inside CommonPrefixes
-
-- can take a **marker** or **continuationToken** to list from a specific key
-
-- can take a **maxKeys** parameter to limit how many keys can be returned
-
-## State Chart
-
-- States with grey background are *Idle* states, which are waiting for
-  a new listing key
-
-- States with blue background are *Processing* states, which are
-  actively processing a new listing key passed by the filter()
-  function
-
-![Delimiter State Chart](./pics/delimiterStateChart.svg)
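The deleted document above describes the core grouping semantics. As a rough, hedged illustration (the real Delimiter class consumes keys incrementally through filter() and FILTER_* return codes rather than over a full key array):

```javascript
// Hedged sketch of delimiter grouping: fold keys sharing a prefix that
// ends with the delimiter into CommonPrefixes, list the rest in Contents.
function listKeys(keys, { prefix = '', delimiter = '/', maxKeys = 1000 } = {}) {
    const Contents = [];
    const CommonPrefixes = [];
    for (const key of keys) {
        if (!key.startsWith(prefix)) {
            continue; // only list keys under the given prefix
        }
        if (Contents.length + CommonPrefixes.length >= maxKeys) {
            break; // maxKeys reached (FILTER_END in the real class)
        }
        const rest = key.slice(prefix.length);
        const idx = rest.indexOf(delimiter);
        if (idx >= 0) {
            // delimiter found: group under a common prefix
            const common = prefix + rest.slice(0, idx + 1);
            if (!CommonPrefixes.includes(common)) {
                CommonPrefixes.push(common);
            }
        } else {
            Contents.push(key);
        }
    }
    return { Contents, CommonPrefixes };
}

// listKeys(['photos/a.jpg', 'photos/b.jpg', 'readme.md'])
// -> { Contents: ['readme.md'], CommonPrefixes: ['photos/'] }
```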
DelimiterMaster listing documentation (file deleted)

@@ -1,45 +0,0 @@
-# DelimiterMaster
-
-The DelimiterMaster class handles raw listings from the database of a
-versioned or non-versioned bucket with an optional delimiter, and
-fills in a curated listing with "Contents" and "CommonPrefixes" as a
-result.
-
-## Expected Behavior
-
-- only lists latest versions of versioned buckets
-
-- only lists keys belonging to the given **prefix** (if provided)
-
-- does not list latest versions that are delete markers
-
-- groups listed keys that have a common prefix ending with a delimiter
-  inside CommonPrefixes
-
-- can take a **marker** or **continuationToken** to list from a specific key
-
-- can take a **maxKeys** parameter to limit how many keys can be returned
-
-- reconciles internal PHD keys with the next version (those are
-  created when a specific version that is the latest version is
-  deleted)
-
-- skips internal keys like replay keys
-
-## State Chart
-
-- States with grey background are *Idle* states, which are waiting for
-  a new listing key
-
-- States with blue background are *Processing* states, which are
-  actively processing a new listing key passed by the filter()
-  function
-
-### Bucket Vformat=v0
-
-![DelimiterMaster State Chart for v0 format](./pics/delimiterMasterV0StateChart.svg)
-
-### Bucket Vformat=v1
-
-For buckets in versioning key format **v1**, the algorithm used is the
-one from [Delimiter](delimiter.md).
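The state chart below (its .dot source follows) encodes the per-key decisions. A hedged paraphrase of the main processing state in code (Version.isDeleteMarker, Version.isPHD, prefixOf, ReplayPrefix and the FILTER_* constants are assumed names taken from the chart labels, not the actual DelimiterMaster implementation):

```javascript
// Hedged sketch of the NotSkippingPrefixNorVersions decisions for the
// v0 key format, paraphrasing the state-chart edges below.
function processMasterKey(key, value, listing) {
    if (Version.isDeleteMarker(value)) {
        // latest version is a delete marker: list nothing, then skip
        // the underlying version keys
        listing.state = 'SkippingVersions';
        return FILTER_ACCEPT;
    }
    if (Version.isPHD(value)) {
        // placeholder (PHD) master key: reconcile with the next version
        listing.state = 'WaitVersionAfterPHD';
        return FILTER_ACCEPT;
    }
    if (key.startsWith(ReplayPrefix)) {
        // internal replay keys are never listed
        listing.state = 'SkippingPrefix';
        listing.prefix = ReplayPrefix;
        return FILTER_SKIP;
    }
    if (listing.nKeys >= listing.maxKeys) {
        return FILTER_END;
    }
    const commonPrefix = prefixOf(key); // defined when key has a delimiter
    if (commonPrefix !== undefined) {
        listing.CommonPrefixes.push(commonPrefix);
        listing.state = 'SkippingPrefix';
        listing.prefix = commonPrefix;
    } else {
        listing.Contents.push({ key, value });
        listing.state = 'SkippingVersions';
    }
    listing.nKeys += 1;
    return FILTER_ACCEPT;
}
```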
DelimiterMaster v0 state chart source (.dot, file deleted)

@@ -1,45 +0,0 @@
-digraph {
-    node [shape="box",style="filled,rounded",fontsize=16,fixedsize=true,width=3];
-    edge [fontsize=14];
-    rankdir=TB;
-
-    START [shape="circle",width=0.2,label="",style="filled",fillcolor="black"]
-    END [shape="circle",width=0.2,label="",style="filled",fillcolor="black",peripheries=2]
-
-    node [fillcolor="lightgrey"];
-    "NotSkippingPrefixNorVersions.Idle" [label="NotSkippingPrefixNorVersions",group="NotSkippingPrefixNorVersions",width=4];
-    "SkippingPrefix.Idle" [label="SkippingPrefix",group="SkippingPrefix"];
-    "SkippingVersions.Idle" [label="SkippingVersions",group="SkippingVersions"];
-    "WaitVersionAfterPHD.Idle" [label="WaitVersionAfterPHD",group="WaitVersionAfterPHD"];
-
-    node [fillcolor="lightblue"];
-    "NotSkippingPrefixNorVersions.Processing" [label="NotSkippingPrefixNorVersions",group="NotSkippingPrefixNorVersions",width=4];
-    "SkippingPrefix.Processing" [label="SkippingPrefix",group="SkippingPrefix"];
-    "SkippingVersions.Processing" [label="SkippingVersions",group="SkippingVersions"];
-    "WaitVersionAfterPHD.Processing" [label="WaitVersionAfterPHD",group="WaitVersionAfterPHD"];
-
-    START -> "SkippingVersions.Idle" [label="[marker != undefined]"]
-    START -> "NotSkippingPrefixNorVersions.Idle" [label="[marker == undefined]"]
-
-    "NotSkippingPrefixNorVersions.Idle" -> "NotSkippingPrefixNorVersions.Processing" [label="filter(key, value)"]
-    "SkippingPrefix.Idle" -> "SkippingPrefix.Processing" [label="filter(key, value)"]
-    "SkippingVersions.Idle" -> "SkippingVersions.Processing" [label="filter(key, value)"]
-    "WaitVersionAfterPHD.Idle" -> "WaitVersionAfterPHD.Processing" [label="filter(key, value)"]
-
-
-    "NotSkippingPrefixNorVersions.Processing" -> "SkippingVersions.Idle" [label="[Version.isDeleteMarker(value)]\n-> FILTER_ACCEPT"]
-    "NotSkippingPrefixNorVersions.Processing" -> "WaitVersionAfterPHD.Idle" [label="[Version.isPHD(value)]\n-> FILTER_ACCEPT"]
-    "NotSkippingPrefixNorVersions.Processing" -> "SkippingPrefix.Idle" [label="[key.startsWith(<ReplayPrefix>)]\n/ prefix <- <ReplayPrefix>\n-> FILTER_SKIP"]
-    "NotSkippingPrefixNorVersions.Processing" -> END [label="[isListableKey(key, value) and\nKeys == maxKeys]\n-> FILTER_END"]
-    "NotSkippingPrefixNorVersions.Processing" -> "SkippingPrefix.Idle" [label="[isListableKey(key, value) and\nnKeys < maxKeys and\nhasDelimiter(key)]\n/ prefix <- prefixOf(key)\n/ CommonPrefixes.append(prefixOf(key))\n-> FILTER_ACCEPT"]
-    "NotSkippingPrefixNorVersions.Processing" -> "SkippingVersions.Idle" [label="[isListableKey(key, value) and\nnKeys < maxKeys and\nnot hasDelimiter(key)]\n/ Contents.append(key, value)\n-> FILTER_ACCEPT"]
-
-    "SkippingPrefix.Processing" -> "SkippingPrefix.Idle" [label="[key.startsWith(prefix)]\n-> FILTER_SKIP"]
-    "SkippingPrefix.Processing" -> "NotSkippingPrefixNorVersions.Processing" [label="[not key.startsWith(prefix)]"]
-
-    "SkippingVersions.Processing" -> "SkippingVersions.Idle" [label="[isVersionKey(key)]\n-> FILTER_SKIP"]
-    "SkippingVersions.Processing" -> "NotSkippingPrefixNorVersions.Processing" [label="[not isVersionKey(key)]"]
-
-    "WaitVersionAfterPHD.Processing" -> "NotSkippingPrefixNorVersions.Processing" [label="[isVersionKey(key) and master(key) == PHDkey]\n/ key <- master(key)"]
-    "WaitVersionAfterPHD.Processing" -> "NotSkippingPrefixNorVersions.Processing" [label="[not isVersionKey(key) or master(key) != PHDkey]"]
-}
pics/delimiterMasterV0StateChart.svg (deleted; 216 lines, 18 KiB): Graphviz-generated (version 2.43.0) rendering of the DelimiterMaster v0 state chart defined by the .dot source above.
Delimiter state chart source (.dot, file deleted)

@@ -1,35 +0,0 @@
-digraph {
-    node [shape="box",style="filled,rounded",fontsize=16,fixedsize=true,width=3];
-    edge [fontsize=14];
-    rankdir=TB;
-
-    START [shape="circle",width=0.2,label="",style="filled",fillcolor="black"]
-    END [shape="circle",width=0.2,label="",style="filled",fillcolor="black",peripheries=2]
-
-    node [fillcolor="lightgrey"];
-    "NotSkipping.Idle" [label="NotSkipping",group="NotSkipping"];
-    "NeverSkipping.Idle" [label="NeverSkipping",group="NeverSkipping"];
-    "NotSkippingPrefix.Idle" [label="NotSkippingPrefix",group="NotSkippingPrefix"];
-    "SkippingPrefix.Idle" [label="SkippingPrefix",group="SkippingPrefix"];
-
-    node [fillcolor="lightblue"];
-    "NeverSkipping.Processing" [label="NeverSkipping",group="NeverSkipping"];
-    "NotSkippingPrefix.Processing" [label="NotSkippingPrefix",group="NotSkippingPrefix"];
-    "SkippingPrefix.Processing" [label="SkippingPrefix",group="SkippingPrefix"];
-
-    START -> "NotSkipping.Idle"
-    "NotSkipping.Idle" -> "NeverSkipping.Idle" [label="[delimiter == undefined]"]
-    "NotSkipping.Idle" -> "NotSkippingPrefix.Idle" [label="[delimiter == '/']"]
-
-    "NeverSkipping.Idle" -> "NeverSkipping.Processing" [label="filter(key, value)"]
-    "NotSkippingPrefix.Idle" -> "NotSkippingPrefix.Processing" [label="filter(key, value)"]
-    "SkippingPrefix.Idle" -> "SkippingPrefix.Processing" [label="filter(key, value)"]
-
-    "NeverSkipping.Processing" -> END [label="[nKeys == maxKeys]\n-> FILTER_END"]
-    "NeverSkipping.Processing" -> "NeverSkipping.Idle" [label="[nKeys < maxKeys]\n/ Contents.append(key, value)\n -> FILTER_ACCEPT"]
-    "NotSkippingPrefix.Processing" -> END [label="[nKeys == maxKeys]\n -> FILTER_END"]
-    "NotSkippingPrefix.Processing" -> "SkippingPrefix.Idle" [label="[nKeys < maxKeys and hasDelimiter(key)]\n/ prefix <- prefixOf(key)\n/ CommonPrefixes.append(prefixOf(key))\n-> FILTER_ACCEPT"]
-    "NotSkippingPrefix.Processing" -> "NotSkippingPrefix.Idle" [label="[nKeys < maxKeys and not hasDelimiter(key)]\n/ Contents.append(key, value)\n -> FILTER_ACCEPT"]
-    "SkippingPrefix.Processing" -> "SkippingPrefix.Idle" [label="[key.startsWith(prefix)]\n-> FILTER_SKIP"]
-    "SkippingPrefix.Processing" -> "NotSkippingPrefix.Processing" [label="[not key.startsWith(prefix)]"]
-}
pics/delimiterStateChart.svg (deleted; 166 lines, 12 KiB): Graphviz-generated (version 2.43.0) rendering of the Delimiter state chart defined by the .dot source above.
Greenkeeper configuration (file deleted)

@@ -1,28 +0,0 @@
-{
-  "groups": {
-    "default": {
-      "packages": [
-        "lib/executables/pensieveCreds/package.json",
-        "package.json"
-      ]
-    }
-  },
-  "branchPrefix": "improvement/greenkeeper.io/",
-  "commitMessages": {
-    "initialBadge": "docs(readme): add Greenkeeper badge",
-    "initialDependencies": "chore(package): update dependencies",
-    "initialBranches": "chore(bert-e): whitelist greenkeeper branches",
-    "dependencyUpdate": "fix(package): update ${dependency} to version ${version}",
-    "devDependencyUpdate": "chore(package): update ${dependency} to version ${version}",
-    "dependencyPin": "fix: pin ${dependency} to ${oldVersionResolved}",
-    "devDependencyPin": "chore: pin ${dependency} to ${oldVersionResolved}",
-    "closes": "\n\nCloses #${number}"
-  },
-  "ignore": [
-    "ajv",
-    "eslint",
-    "eslint-plugin-react",
-    "eslint-config-airbnb",
-    "eslint-config-scality"
-  ]
-}
index.ts
@@ -1,19 +1,14 @@
 import * as evaluators from './lib/policyEvaluator/evaluator';
 import evaluatePrincipal from './lib/policyEvaluator/principal';
-import RequestContext, {
-    actionNeedQuotaCheck,
-    actionNeedQuotaCheckCopy,
-    actionWithDataDeletion } from './lib/policyEvaluator/RequestContext';
+import RequestContext from './lib/policyEvaluator/RequestContext';
 import * as requestUtils from './lib/policyEvaluator/requestUtils';
 import * as actionMaps from './lib/policyEvaluator/utils/actionMaps';
 import { validateUserPolicy } from './lib/policy/policyValidator'
-import * as locationConstraints from './lib/patches/locationConstraints';
 import * as userMetadata from './lib/s3middleware/userMetadata';
 import convertToXml from './lib/s3middleware/convertToXml';
 import escapeForXml from './lib/s3middleware/escapeForXml';
 import * as objectLegalHold from './lib/s3middleware/objectLegalHold';
 import * as tagging from './lib/s3middleware/tagging';
-import { checkDateModifiedHeaders } from './lib/s3middleware/validateConditionalHeaders';
 import { validateConditionalHeaders } from './lib/s3middleware/validateConditionalHeaders';
 import MD5Sum from './lib/s3middleware/MD5Sum';
 import NullStream from './lib/s3middleware/nullStream';
@@ -21,14 +16,11 @@ import * as objectUtils from './lib/s3middleware/objectUtils';
 import * as mpuUtils from './lib/s3middleware/azureHelpers/mpuUtils';
 import ResultsCollector from './lib/s3middleware/azureHelpers/ResultsCollector';
 import SubStreamInterface from './lib/s3middleware/azureHelpers/SubStreamInterface';
-import { prepareStream } from './lib/s3middleware/prepareStream';
 import * as processMpuParts from './lib/s3middleware/processMpuParts';
 import * as retention from './lib/s3middleware/objectRetention';
-import * as objectRestore from './lib/s3middleware/objectRestore';
 import * as lifecycleHelpers from './lib/s3middleware/lifecycleHelpers';
 export { default as errors } from './lib/errors';
 export { default as Clustering } from './lib/Clustering';
-export * as ClusterRPC from './lib/clustering/ClusterRPC';
 export * as ipCheck from './lib/ipCheck';
 export * as auth from './lib/auth/auth';
 export * as constants from './lib/constants';
@@ -41,19 +33,21 @@ export * as stream from './lib/stream';
 export * as jsutil from './lib/jsutil';
 export { default as stringHash } from './lib/stringHash';
 export * as db from './lib/db';
-export * as errorUtils from './lib/errorUtils';
 export { default as shuffle } from './lib/shuffle';
 export * as models from './lib/models';
 
 export const algorithms = {
-    list: require('./lib/algos/list/exportAlgos'),
+    list: {
+        Basic: require('./lib/algos/list/basic').List,
+        Delimiter: require('./lib/algos/list/delimiter').Delimiter,
+        DelimiterVersions: require('./lib/algos/list/delimiterVersions').DelimiterVersions,
+        DelimiterMaster: require('./lib/algos/list/delimiterMaster').DelimiterMaster,
+        MPU: require('./lib/algos/list/MPU').MultipartUploads,
+    },
     listTools: {
         DelimiterTools: require('./lib/algos/list/tools'),
-        Skip: require('./lib/algos/list/skip'),
     },
     cache: {
-        GapSet: require('./lib/algos/cache/GapSet'),
-        GapCache: require('./lib/algos/cache/GapCache'),
         LRUCache: require('./lib/algos/cache/LRUCache'),
     },
     stream: {
@@ -70,9 +64,6 @@ export const policies = {
     RequestContext,
     requestUtils,
     actionMaps,
-    actionNeedQuotaCheck,
-    actionWithDataDeletion,
-    actionNeedQuotaCheckCopy,
 };
 
 export const testing = {
@@ -85,7 +76,6 @@ export const s3middleware = {
     escapeForXml,
     objectLegalHold,
     tagging,
-    checkDateModifiedHeaders,
     validateConditionalHeaders,
     MD5Sum,
     NullStream,
@@ -95,10 +85,8 @@ export const s3middleware = {
         ResultsCollector,
         SubStreamInterface,
     },
-    prepareStream,
     processMpuParts,
     retention,
-    objectRestore,
     lifecycleHelpers,
 };
 
@@ -169,7 +157,3 @@ export const storage = {
 export const pensieve = {
     credentialUtils: require('./lib/executables/pensieveCreds/utils'),
 };
-
-export const patches = {
-    locationConstraints,
-};
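The hotfix replaces the exportAlgos indirection with explicit requires, so each listing class is reachable directly under algorithms.list. A minimal consumer sketch, assuming the package is imported under the name 'arsenal' and using a stub in place of the werelogs RequestLogger the listing classes normally receive:

import { algorithms } from 'arsenal';

// stand-in for the werelogs RequestLogger that callers normally pass
const logger: any = { info: () => {}, error: () => {}, debug: () => {} };

// list the 'photos/' "folder" one level deep, as S3 ListObjects would
const lister = new algorithms.list.Delimiter(
    { delimiter: '/', prefix: 'photos/', maxKeys: 1000 },
    logger,
);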
lib/algos/cache/GapCache.ts (deleted on hotfix/7.7; path inferred from the index.ts require above)
@@ -1,363 +0,0 @@
import { OrderedSet } from '@js-sdsl/ordered-set';

import {
    default as GapSet,
    GapSetEntry,
} from './GapSet';

// the API is similar but is not strictly a superset of GapSetInterface
// so we don't extend from it
export interface GapCacheInterface {
    exposureDelayMs: number;
    maxGapWeight: number;
    size: number;

    setGap: (firstKey: string, lastKey: string, weight: number) => void;
    removeOverlappingGaps: (overlappingKeys: string[]) => number;
    lookupGap: (minKey: string, maxKey?: string) => Promise<GapSetEntry | null>;
    [Symbol.iterator]: () => Iterator<GapSetEntry>;
    toArray: () => GapSetEntry[];
};

class GapCacheUpdateSet {
    newGaps: GapSet;
    updatedKeys: OrderedSet<string>;

    constructor(maxGapWeight: number) {
        this.newGaps = new GapSet(maxGapWeight);
        this.updatedKeys = new OrderedSet();
    }

    addUpdateBatch(updatedKeys: OrderedSet<string>): void {
        this.updatedKeys.union(updatedKeys);
    }
};

/**
 * Cache of listing "gaps" i.e. ranges of keys that can be skipped
 * over during listing (because they only contain delete markers as
 * latest versions).
 *
 * Typically, a single GapCache instance would be attached to a raft session.
 *
 * The API usage is as follows:
 *
 * - Initialize a GapCache instance by calling start() (this starts an internal timer)
 *
 * - Insert a gap or update an existing one via setGap()
 *
 * - Lookup existing gaps via lookupGap()
 *
 * - Invalidate gaps that overlap a specific set of keys via removeOverlappingGaps()
 *
 * - Shut down a GapCache instance by calling stop() (this stops the internal timer)
 *
 * Gaps inserted via setGap() are not exposed immediately to lookupGap(), but only:
 *
 * - after a certain delay always larger than 'exposureDelayMs' and usually shorter
 *   than twice this value (but might be slightly longer in rare cases)
 *
 * - and only if they haven't been invalidated by a recent call to removeOverlappingGaps()
 *
 * This ensures atomicity between gap creation and invalidation from updates under
 * the condition that a gap is created from first key to last key within the time defined
 * by 'exposureDelayMs'.
 *
 * The implementation is based on two extra temporary "update sets" on top of the main
 * exposed gap set, one called "staging" and the other "frozen", each containing a
 * temporary updated gap set and a list of updated keys to invalidate gaps with (coming
 * from calls to removeOverlappingGaps()). Every "exposureDelayMs" milliseconds, the frozen
 * gaps are invalidated by all key updates coming from either of the "staging" or "frozen"
 * update set, then merged into the exposed gaps set, after which the staging updates become
 * the frozen updates and won't receive any new gap until the next cycle.
 */
export default class GapCache implements GapCacheInterface {
    _exposureDelayMs: number;
    maxGaps: number;

    _stagingUpdates: GapCacheUpdateSet;
    _frozenUpdates: GapCacheUpdateSet;
    _exposedGaps: GapSet;
    _exposeFrozenInterval: NodeJS.Timeout | null;

    /**
     * @constructor
     *
     * @param {number} exposureDelayMs - minimum delay between
     * insertion of a gap via setGap() and its exposure via
     * lookupGap()
     * @param {number} maxGaps - maximum number of cached gaps, after
     * which no new gap can be added by setGap(). (Note: a future
     * improvement could replace this by an eviction strategy)
     * @param {number} maxGapWeight - maximum "weight" of individual
     * cached gaps, which is also the granularity for
     * invalidation. Individual gaps can be chained together,
     * which lookupGap() transparently consolidates in the response
     * into a single large gap.
     */
    constructor(exposureDelayMs: number, maxGaps: number, maxGapWeight: number) {
        this._exposureDelayMs = exposureDelayMs;
        this.maxGaps = maxGaps;

        this._stagingUpdates = new GapCacheUpdateSet(maxGapWeight);
        this._frozenUpdates = new GapCacheUpdateSet(maxGapWeight);
        this._exposedGaps = new GapSet(maxGapWeight);
        this._exposeFrozenInterval = null;
    }

    /**
     * Create a GapCache from an array of exposed gap entries (used in tests)
     *
     * @return {GapCache} - a new GapCache instance
     */
    static createFromArray(
        gaps: GapSetEntry[],
        exposureDelayMs: number,
        maxGaps: number,
        maxGapWeight: number
    ): GapCache {
        const gapCache = new GapCache(exposureDelayMs, maxGaps, maxGapWeight);
        gapCache._exposedGaps = GapSet.createFromArray(gaps, maxGapWeight)
        return gapCache;
    }

    /**
     * Internal helper to remove gaps in the staging and frozen sets
     * overlapping with previously updated keys, right before the
     * frozen gaps get exposed.
     *
     * @return {undefined}
     */
    _removeOverlappingGapsBeforeExpose(): void {
        for (const { updatedKeys } of [this._stagingUpdates, this._frozenUpdates]) {
            if (updatedKeys.size() === 0) {
                continue;
            }
            for (const { newGaps } of [this._stagingUpdates, this._frozenUpdates]) {
                if (newGaps.size === 0) {
                    continue;
                }
                newGaps.removeOverlappingGaps(updatedKeys);
            }
        }
    }

    /**
     * This function is the core mechanism that updates the exposed gaps in the
     * cache. It is called on a regular interval defined by 'exposureDelayMs'.
     *
     * It does the following in order:
     *
     * - remove gaps from the frozen set that overlap with any key present in a
     *   batch passed to removeOverlappingGaps() since the last two triggers of
     *   _exposeFrozen()
     *
     * - merge the remaining gaps from the frozen set to the exposed set, which
     *   makes them visible from calls to lookupGap()
     *
     * - rotate by freezing the currently staging updates and initiating a new
     *   staging updates set
     *
     * @return {undefined}
     */
    _exposeFrozen(): void {
        this._removeOverlappingGapsBeforeExpose();
        for (const gap of this._frozenUpdates.newGaps) {
            // Use a trivial strategy to keep the cache size within
            // limits: refuse to add new gaps when the size is above
            // the 'maxGaps' threshold. We solely rely on
            // removeOverlappingGaps() to make space for new gaps.
            if (this._exposedGaps.size < this.maxGaps) {
                this._exposedGaps.setGap(gap.firstKey, gap.lastKey, gap.weight);
            }
        }
        this._frozenUpdates = this._stagingUpdates;
        this._stagingUpdates = new GapCacheUpdateSet(this.maxGapWeight);
    }

    /**
     * Start the internal GapCache timer
     *
     * @return {undefined}
     */
    start(): void {
        if (this._exposeFrozenInterval) {
            return;
        }
        this._exposeFrozenInterval = setInterval(
            () => this._exposeFrozen(),
            this._exposureDelayMs);
    }

    /**
     * Stop the internal GapCache timer
     *
     * @return {undefined}
     */
    stop(): void {
        if (this._exposeFrozenInterval) {
            clearInterval(this._exposeFrozenInterval);
            this._exposeFrozenInterval = null;
        }
    }

    /**
     * Record a gap between two keys, associated with a weight to
     * limit individual gap's spanning ranges in the cache, for a more
     * granular invalidation.
     *
     * The function handles splitting and merging existing gaps to
     * maintain an optimal weight of cache entries.
     *
     * NOTE 1: the caller must ensure that the full length of the gap
     * between 'firstKey' and 'lastKey' has been built from a listing
     * snapshot that is more recent than 'exposureDelayMs' milliseconds,
     * in order to guarantee that the exposed gap will be fully
     * covered (and potentially invalidated) from recent calls to
     * removeOverlappingGaps().
     *
     * NOTE 2: a usual pattern when building a large gap from multiple
     * calls to setGap() is to start the next gap from 'lastKey',
     * which will be passed as 'firstKey' in the next call, so that
     * gaps can be chained together and consolidated by lookupGap().
     *
     * @param {string} firstKey - first key of the gap
     * @param {string} lastKey - last key of the gap, must be greater
     *   or equal than 'firstKey'
     * @param {number} weight - total weight between 'firstKey' and 'lastKey'
     * @return {undefined}
     */
    setGap(firstKey: string, lastKey: string, weight: number): void {
        this._stagingUpdates.newGaps.setGap(firstKey, lastKey, weight);
    }

    /**
     * Remove gaps that overlap with a given set of keys. Used to
     * invalidate gaps when keys are inserted or deleted.
     *
     * @param {OrderedSet<string> | string[]} overlappingKeys - remove gaps that
     * overlap with any of this set of keys
     * @return {number} - how many gaps were removed from the exposed
     * gaps only (overlapping gaps not yet exposed are also invalidated
     * but are not accounted for in the returned value)
     */
    removeOverlappingGaps(overlappingKeys: OrderedSet<string> | string[]): number {
        let overlappingKeysSet;
        if (Array.isArray(overlappingKeys)) {
            overlappingKeysSet = new OrderedSet(overlappingKeys);
        } else {
            overlappingKeysSet = overlappingKeys;
        }
        this._stagingUpdates.addUpdateBatch(overlappingKeysSet);
        return this._exposedGaps.removeOverlappingGaps(overlappingKeysSet);
    }

    /**
     * Lookup the next exposed gap that overlaps with [minKey, maxKey]. Internally
     * chained gaps are coalesced in the response into a single contiguous large gap.
     *
     * @param {string} minKey - minimum key overlapping with the returned gap
     * @param {string} [maxKey] - maximum key overlapping with the returned gap
     * @return {Promise<GapSetEntry | null>} - result of the lookup if a gap
     * was found, null otherwise, as a Promise
     */
    lookupGap(minKey: string, maxKey?: string): Promise<GapSetEntry | null> {
        return this._exposedGaps.lookupGap(minKey, maxKey);
    }

    /**
     * Get the maximum weight setting for individual gaps.
     *
     * @return {number} - maximum weight of individual gaps
     */
    get maxGapWeight(): number {
        return this._exposedGaps.maxWeight;
    }

    /**
     * Set the maximum weight setting for individual gaps.
     *
     * @param {number} gapWeight - maximum weight of individual gaps
     */
    set maxGapWeight(gapWeight: number) {
        this._exposedGaps.maxWeight = gapWeight;
        // also update transient gap sets
        this._stagingUpdates.newGaps.maxWeight = gapWeight;
        this._frozenUpdates.newGaps.maxWeight = gapWeight;
    }

    /**
     * Get the exposure delay in milliseconds, which is the minimum
     * time after which newly cached gaps will be exposed by
     * lookupGap().
     *
     * @return {number} - exposure delay in milliseconds
     */
    get exposureDelayMs(): number {
        return this._exposureDelayMs;
    }

    /**
     * Set the exposure delay in milliseconds, which is the minimum
     * time after which newly cached gaps will be exposed by
     * lookupGap(). Setting this attribute automatically updates the
     * internal state to honor the new value.
     *
     * @param {number} - exposure delay in milliseconds
     */
    set exposureDelayMs(exposureDelayMs: number) {
        if (exposureDelayMs !== this._exposureDelayMs) {
            this._exposureDelayMs = exposureDelayMs;
            if (this._exposeFrozenInterval) {
                // invalidate all pending gap updates, as the new interval may not be
                // safe for them
                this._stagingUpdates = new GapCacheUpdateSet(this.maxGapWeight);
                this._frozenUpdates = new GapCacheUpdateSet(this.maxGapWeight);

                // reinitialize the _exposeFrozenInterval timer with the updated delay
                this.stop();
                this.start();
            }
        }
    }

    /**
     * Get the number of exposed gaps
     *
     * @return {number} number of exposed gaps
     */
    get size(): number {
        return this._exposedGaps.size;
    }

    /**
     * Iterate over exposed gaps
     *
     * @return {Iterator<GapSetEntry>} an iterator over exposed gaps
     */
    [Symbol.iterator](): Iterator<GapSetEntry> {
        return this._exposedGaps[Symbol.iterator]();
    }

    /**
     * Get an array of all exposed gaps
     *
     * @return {GapSetEntry[]} array of exposed gaps
     */
    toArray(): GapSetEntry[] {
        return this._exposedGaps.toArray();
    }

    /**
     * Clear all exposed and staging gaps from the cache.
     *
     * Note: retains invalidating updates from removeOverlappingGaps()
     * for correctness of gaps inserted afterwards.
     *
     * @return {undefined}
     */
    clear(): void {
        this._stagingUpdates.newGaps = new GapSet(this.maxGapWeight);
        this._frozenUpdates.newGaps = new GapSet(this.maxGapWeight);
        this._exposedGaps = new GapSet(this.maxGapWeight);
    }
}
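GapCache is removed on the hotfix branch; for reference, a minimal sketch of how the development-side module is driven, following the lifecycle listed in its own doc comment (the delay, limits, and keys below are illustrative):

// Sketch only: exercises the documented GapCache lifecycle.
// exposureDelayMs=100, maxGaps=1000, maxGapWeight=500 are arbitrary values.
import GapCache from './lib/algos/cache/GapCache';

async function demo() {
    const cache = new GapCache(100, 1000, 500);
    cache.start();                             // starts the expose timer

    cache.setGap('bar', 'baz', 10);            // staged, not yet visible
    console.log(await cache.lookupGap('bar')); // null until exposed

    // after >= 2 * exposureDelayMs the gap has rotated through the
    // staging and frozen sets and is now exposed
    await new Promise(resolve => setTimeout(resolve, 250));
    console.log(await cache.lookupGap('bar')); // { firstKey: 'bar', lastKey: 'baz', weight: 10 }

    cache.removeOverlappingGaps(['bat']);      // 'bar' <= 'bat' <= 'baz': gap invalidated
    cache.stop();
}
demo();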
lib/algos/cache/GapSet.ts (deleted on hotfix/7.7; path inferred from the GapCache import of './GapSet' above)
@@ -1,366 +0,0 @@
import assert from 'assert';
import { OrderedSet } from '@js-sdsl/ordered-set';

import errors from '../../errors';

export type GapSetEntry = {
    firstKey: string,
    lastKey: string,
    weight: number,
};

export interface GapSetInterface {
    maxWeight: number;
    size: number;

    setGap: (firstKey: string, lastKey: string, weight: number) => GapSetEntry;
    removeOverlappingGaps: (overlappingKeys: string[]) => number;
    lookupGap: (minKey: string, maxKey?: string) => Promise<GapSetEntry | null>;
    [Symbol.iterator]: () => Iterator<GapSetEntry>;
    toArray: () => GapSetEntry[];
};

/**
 * Specialized data structure to support caching of listing "gaps",
 * i.e. ranges of keys that can be skipped over during listing
 * (because they only contain delete markers as latest versions)
 */
export default class GapSet implements GapSetInterface, Iterable<GapSetEntry> {
    _gaps: OrderedSet<GapSetEntry>;
    _maxWeight: number;

    /**
     * @constructor
     * @param {number} maxWeight - weight threshold for each cached
     *   gap (unitless). Triggers splitting gaps when reached
     */
    constructor(maxWeight: number) {
        this._gaps = new OrderedSet(
            [],
            (left: GapSetEntry, right: GapSetEntry) => (
                left.firstKey < right.firstKey ? -1 :
                    left.firstKey > right.firstKey ? 1 : 0
            )
        );
        this._maxWeight = maxWeight;
    }

    /**
     * Create a GapSet from an array of gap entries (used in tests)
     */
    static createFromArray(gaps: GapSetEntry[], maxWeight: number): GapSet {
        const gapSet = new GapSet(maxWeight);
        for (const gap of gaps) {
            gapSet._gaps.insert(gap);
        }
        return gapSet;
    }

    /**
     * Record a gap between two keys, associated with a weight to limit
     * individual gap sizes in the cache.
     *
     * The function handles splitting and merging existing gaps to
     * maintain an optimal weight of cache entries.
     *
     * @param {string} firstKey - first key of the gap
     * @param {string} lastKey - last key of the gap, must be greater
     *   or equal than 'firstKey'
     * @param {number} weight - total weight between 'firstKey' and 'lastKey'
     * @return {GapSetEntry} - existing or new gap entry
     */
    setGap(firstKey: string, lastKey: string, weight: number): GapSetEntry {
        assert(lastKey >= firstKey);

        // Step 1/4: Find the closest left-overlapping gap, and either re-use it
        // or chain it with a new gap depending on the weights if it exists (otherwise
        // just creates a new gap).
        const curGapIt = this._gaps.reverseLowerBound(<GapSetEntry>{ firstKey });
        let curGap;
        if (curGapIt.isAccessible()) {
            curGap = curGapIt.pointer;
            if (curGap.lastKey >= lastKey) {
                // return fully overlapping gap already cached
                return curGap;
            }
        }
        let remainingWeight = weight;
        if (!curGap // no previous gap
            || curGap.lastKey < firstKey // previous gap not overlapping
            || (curGap.lastKey === firstKey // previous gap overlapping by one key...
                && curGap.weight + weight > this._maxWeight) // ...but we can't extend it
        ) {
            // create a new gap indexed by 'firstKey'
            curGap = { firstKey, lastKey: firstKey, weight: 0 };
            this._gaps.insert(curGap);
        } else if (curGap.lastKey > firstKey && weight > this._maxWeight) {
            // previous gap is either fully or partially contained in the new gap
            // and cannot be extended: substract its weight from the total (heuristic
            // in case the previous gap doesn't start at 'firstKey', which is the
            // uncommon case)
            remainingWeight -= curGap.weight;

            // there may be an existing chained gap starting with the previous gap's
            // 'lastKey': use it if it exists
            const chainedGapIt = this._gaps.find(<GapSetEntry>{ firstKey: curGap.lastKey });
            if (chainedGapIt.isAccessible()) {
                curGap = chainedGapIt.pointer;
            } else {
                // no existing chained gap: chain a new gap to the previous gap
                curGap = {
                    firstKey: curGap.lastKey,
                    lastKey: curGap.lastKey,
                    weight: 0,
                };
                this._gaps.insert(curGap);
            }
        }
        // Step 2/4: Cleanup existing gaps fully included in firstKey -> lastKey, and
        // aggregate their weights in curGap to define the minimum weight up to the
        // last merged gap.
        let nextGap;
        while (true) {
            const nextGapIt = this._gaps.upperBound(<GapSetEntry>{ firstKey: curGap.firstKey });
            nextGap = nextGapIt.isAccessible() && nextGapIt.pointer;
            // stop the cleanup when no more gap or if the next gap is not fully
            // included in curGap
            if (!nextGap || nextGap.lastKey > lastKey) {
                break;
            }
            this._gaps.eraseElementByIterator(nextGapIt);
            curGap.lastKey = nextGap.lastKey;
            curGap.weight += nextGap.weight;
        }

        // Step 3/4: Extend curGap to lastKey, adjusting the weight.
        // At this point, curGap weight is the minimum weight of the finished gap, save it
        // for step 4.
        let minMergedWeight = curGap.weight;
        if (curGap.lastKey === firstKey && firstKey !== lastKey) {
            // extend the existing gap by the full amount 'firstKey -> lastKey'
            curGap.lastKey = lastKey;
            curGap.weight += remainingWeight;
        } else if (curGap.lastKey <= lastKey) {
            curGap.lastKey = lastKey;
            curGap.weight = remainingWeight;
        }

        // Step 4/4: Find the closest right-overlapping gap, and if it exists, either merge
        // it or chain it with curGap depending on the weights.
        if (nextGap && nextGap.firstKey <= lastKey) {
            // nextGap overlaps with the new gap: check if we can merge it
            minMergedWeight += nextGap.weight;
            let mergedWeight;
            if (lastKey === nextGap.firstKey) {
                // nextGap is chained with curGap: add the full weight of nextGap
                mergedWeight = curGap.weight + nextGap.weight;
            } else {
                // strict overlap: don't add nextGap's weight unless
                // it's larger than the sum of merged ranges (as it is
                // then included in `minMergedWeight`)
                mergedWeight = Math.max(curGap.weight, minMergedWeight);
            }
            if (mergedWeight <= this._maxWeight) {
                // merge nextGap into curGap
                curGap.lastKey = nextGap.lastKey;
                curGap.weight = mergedWeight;
                this._gaps.eraseElementByKey(nextGap);
            } else {
                // adjust the last key to chain with nextGap and substract the next
                // gap's weight from curGap (heuristic)
                curGap.lastKey = nextGap.firstKey;
                curGap.weight = Math.max(mergedWeight - nextGap.weight, 0);
                curGap = nextGap;
            }
        }
        // return a copy of curGap
        return Object.assign({}, curGap);
    }

    /**
     * Remove gaps that overlap with one or more keys in a given array or
     * OrderedSet. Used to invalidate gaps when keys are inserted or deleted.
     *
     * @param {OrderedSet<string> | string[]} overlappingKeys - remove gaps that overlap
     *   with any of this set of keys
     * @return {number} - how many gaps were removed
     */
    removeOverlappingGaps(overlappingKeys: OrderedSet<string> | string[]): number {
        // To optimize processing with a large number of keys and/or gaps, this function:
        //
        // 1. converts the overlappingKeys array to a OrderedSet (if not already a OrderedSet)
        // 2. queries both the gaps set and the overlapping keys set in a loop, which allows:
        //    - skipping ranges of overlapping keys at once when there is no new overlapping gap
        //    - skipping ranges of gaps at once when there is no overlapping key
        //
        // This way, it is efficient when the number of non-overlapping gaps is large
        // (which is the most common case in practice).

        let overlappingKeysSet;
        if (Array.isArray(overlappingKeys)) {
            overlappingKeysSet = new OrderedSet(overlappingKeys);
        } else {
            overlappingKeysSet = overlappingKeys;
        }
        const firstKeyIt = overlappingKeysSet.begin();
        let currentKey = firstKeyIt.isAccessible() && firstKeyIt.pointer;
        let nRemoved = 0;
        while (currentKey) {
            const closestGapIt = this._gaps.reverseUpperBound(<GapSetEntry>{ firstKey: currentKey });
            if (closestGapIt.isAccessible()) {
                const closestGap = closestGapIt.pointer;
                if (currentKey <= closestGap.lastKey) {
                    // currentKey overlaps closestGap: remove the gap
                    this._gaps.eraseElementByIterator(closestGapIt);
                    nRemoved += 1;
                }
            }
            const nextGapIt = this._gaps.lowerBound(<GapSetEntry>{ firstKey: currentKey });
            if (!nextGapIt.isAccessible()) {
                // no more gap: we're done
                return nRemoved;
            }
            const nextGap = nextGapIt.pointer;
            // advance to the last key potentially overlapping with nextGap
            let currentKeyIt = overlappingKeysSet.reverseLowerBound(nextGap.lastKey);
            if (currentKeyIt.isAccessible()) {
                currentKey = currentKeyIt.pointer;
                if (currentKey >= nextGap.firstKey) {
                    // currentKey overlaps nextGap: remove the gap
                    this._gaps.eraseElementByIterator(nextGapIt);
                    nRemoved += 1;
                }
            }
            // advance to the first key potentially overlapping with another gap
            currentKeyIt = overlappingKeysSet.lowerBound(nextGap.lastKey);
            currentKey = currentKeyIt.isAccessible() && currentKeyIt.pointer;
        }
        return nRemoved;
    }

    /**
     * Internal helper to coalesce multiple chained gaps into a single gap.
     *
     * It is only used to construct lookupGap() return values and
     * doesn't modify the GapSet.
     *
     * NOTE: The function may take a noticeable amount of time and CPU
     * to execute if a large number of chained gaps have to be
     * coalesced, but it should never take more than a few seconds. In
     * most cases it should take less than a millisecond. It regularly
     * yields to the nodejs event loop to avoid blocking it during a
     * long execution.
     *
     * @param {GapSetEntry} firstGap - first gap of the chain to coalesce with
     *   the next ones in the chain
     * @return {Promise<GapSetEntry>} - a new coalesced entry, as a Promise
     */
    _coalesceGapChain(firstGap: GapSetEntry): Promise<GapSetEntry> {
        return new Promise(resolve => {
            const coalescedGap: GapSetEntry = Object.assign({}, firstGap);
            const coalesceGapChainIteration = () => {
                // efficiency trade-off: 100 iterations of log(N) complexity lookups should
                // not block the event loop for too long
                for (let opCounter = 0; opCounter < 100; ++opCounter) {
                    const chainedGapIt = this._gaps.find(
                        <GapSetEntry>{ firstKey: coalescedGap.lastKey });
                    if (!chainedGapIt.isAccessible()) {
                        // chain is complete
                        return resolve(coalescedGap);
                    }
                    const chainedGap = chainedGapIt.pointer;
                    if (chainedGap.firstKey === chainedGap.lastKey) {
                        // found a single-key gap: chain is complete
                        return resolve(coalescedGap);
                    }
                    coalescedGap.lastKey = chainedGap.lastKey;
                    coalescedGap.weight += chainedGap.weight;
                }
                // yield to the event loop before continuing the process
                // of coalescing the gap chain
                return process.nextTick(coalesceGapChainIteration);
            };
            coalesceGapChainIteration();
        });
    }

    /**
     * Lookup the next gap that overlaps with [minKey, maxKey]. Internally chained
     * gaps are coalesced in the response into a single contiguous large gap.
     *
     * @param {string} minKey - minimum key overlapping with the returned gap
     * @param {string} [maxKey] - maximum key overlapping with the returned gap
     * @return {Promise<GapSetEntry | null>} - result of the lookup if a gap
     * was found, null otherwise, as a Promise
     */
    async lookupGap(minKey: string, maxKey?: string): Promise<GapSetEntry | null> {
        let firstGap: GapSetEntry | null = null;
        const minGapIt = this._gaps.reverseLowerBound(<GapSetEntry>{ firstKey: minKey });
        const minGap = minGapIt.isAccessible() && minGapIt.pointer;
        if (minGap && minGap.lastKey >= minKey) {
            firstGap = minGap;
        } else {
            const maxGapIt = this._gaps.upperBound(<GapSetEntry>{ firstKey: minKey });
            const maxGap = maxGapIt.isAccessible() && maxGapIt.pointer;
            if (maxGap && (maxKey === undefined || maxGap.firstKey <= maxKey)) {
                firstGap = maxGap;
            }
        }
        if (!firstGap) {
            return null;
        }
        return this._coalesceGapChain(firstGap);
    }

    /**
     * Get the maximum weight setting for individual gaps.
     *
     * @return {number} - maximum weight of individual gaps
     */
    get maxWeight(): number {
        return this._maxWeight;
    }

    /**
     * Set the maximum weight setting for individual gaps.
     *
     * @param {number} gapWeight - maximum weight of individual gaps
     */
    set maxWeight(gapWeight: number) {
        this._maxWeight = gapWeight;
    }

    /**
     * Get the number of gaps stored in this set.
     *
     * @return {number} - number of gaps stored in this set
     */
    get size(): number {
        return this._gaps.size();
    }

    /**
     * Iterate over each gap of the set, ordered by first key
     *
     * @return {Iterator<GapSetEntry>} - an iterator over all gaps
     * Example:
     *     for (const gap of myGapSet) { ... }
     */
    [Symbol.iterator](): Iterator<GapSetEntry> {
        return this._gaps[Symbol.iterator]();
    }

    /**
     * Return an array containing all gaps, ordered by first key
     *
     * NOTE: there is a toArray() method in the OrderedSet implementation
     * but it does not scale well and overflows the stack quickly. This is
     * why we provide an implementation based on an iterator.
     *
     * @return {GapSetEntry[]} - an array containing all gaps
     */
    toArray(): GapSetEntry[] {
        return [...this];
    }
}
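A small sketch of the deleted GapSet's chaining and invalidation behavior, following the steps documented in setGap() (the import path mirrors the repo layout; weights are illustrative):

import GapSet from './lib/algos/cache/GapSet';

const gaps = new GapSet(100);          // maxWeight = 100
gaps.setGap('a', 'f', 40);
// starting the next gap from the previous gap's lastKey chains them,
// and they merge since 40 + 30 <= maxWeight
gaps.setGap('f', 'k', 30);
console.log(gaps.toArray());
// [{ firstKey: 'a', lastKey: 'k', weight: 70 }]

gaps.removeOverlappingGaps(['g']);     // 'g' falls inside the gap
console.log(gaps.size);                // 0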
@@ -1,6 +1,6 @@
 'use strict'; // eslint-disable-line strict
 
-const { FILTER_ACCEPT, SKIP_NONE } = require('./tools');
+const { FILTER_SKIP, SKIP_NONE } = require('./tools');
 
 // Use a heuristic to amortize the cost of JSON
 // serialization/deserialization only on largest metadata where the
@@ -92,26 +92,21 @@ class Extension {
      * @param {object} entry - a listing entry from metadata
      *        expected format: { key, value }
      * @return {number} - result of filtering the entry:
-     *        FILTER_ACCEPT: entry is accepted and may or not be included
-     *        in the result
-     *        FILTER_SKIP: listing may skip directly (with "gte" param) to
-     *        the key returned by the skipping() method
-     *        FILTER_END: the results are complete, listing can be stopped
+     *        > 0: entry is accepted and included in the result
+     *        = 0: entry is accepted but not included (skipping)
+     *        < 0: entry is not accepted, listing should finish
      */
-    filter(/* entry: { key, value } */) {
-        return FILTER_ACCEPT;
+    filter(entry) {
+        return entry ? FILTER_SKIP : FILTER_SKIP;
     }
 
     /**
-     * Provides the next key at which the listing task is allowed to skip to.
-     * This could allow to skip over:
-     * - a key prefix ending with the delimiter
-     * - all remaining versions of an object when doing a current
-     *   versions listing in v0 format
-     * - a cached "gap" of deleted objects when doing a current
-     *   versions listing in v0 format
+     * Provides the insight into why filter is skipping an entry. This could be
+     * because it is skipping a range of delimited keys or a range of specific
+     * version when doing master version listing.
      *
-     * @return {string} - the next key at which the listing task is allowed to skip to
+     * @return {string} - the insight: a common prefix or a master key,
+     *                    or SKIP_NONE if there is no insight
      */
     skipping() {
         return SKIP_NONE;
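For illustration, a sketch of a subclass honoring the numeric filter convention restored by this hunk (the class, the prefix check, and the res/keys initialization are hypothetical; only the FILTER_* constants and the Extension base come from the files above):

import Extension from './Extension';
import { FILTER_ACCEPT, FILTER_SKIP, FILTER_END, SKIP_NONE } from './tools';

// Hypothetical extension: include keys under a fixed prefix, skip keys
// sorted before it, and end the listing once past it.
class PrefixOnly extends Extension {
    prefix: string;

    constructor(parameters: { prefix?: string }, logger: any) {
        super(parameters, logger);
        this.prefix = parameters.prefix || '';
        this.res = []; // assumed result accumulator, as in basic.js's List
    }

    filter(entry: { key: string, value: string }) {
        if (entry.key.startsWith(this.prefix)) {
            this.res.push(entry);
            return FILTER_ACCEPT; // > 0: accepted and included
        }
        if (entry.key < this.prefix) {
            return FILTER_SKIP;   // = 0: accepted but not included
        }
        return FILTER_END;        // < 0: past the range, stop listing
    }

    skipping() {
        // this sketch has no smarter skip target to offer
        return SKIP_NONE;
    }
}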
@@ -1,7 +1,7 @@
 'use strict'; // eslint-disable-line strict
 
 const { inc, checkLimit, listingParamsMasterKeysV0ToV1,
-    FILTER_END, FILTER_ACCEPT, SKIP_NONE } = require('./tools');
+    FILTER_END, FILTER_ACCEPT } = require('./tools');
 const DEFAULT_MAX_KEYS = 1000;
 const VSConst = require('../../versioning/constants').VersioningConstants;
 const { DbPrefixes, BucketVersioningKeyFormat } = VSConst;
@@ -163,7 +163,7 @@ class MultipartUploads {
     }
 
     skipping() {
-        return SKIP_NONE;
+        return '';
     }
 
     /**
@@ -2,7 +2,7 @@
 
 const Extension = require('./Extension').default;
 
-const { checkLimit, FILTER_END, FILTER_ACCEPT } = require('./tools');
+const { checkLimit, FILTER_END, FILTER_ACCEPT, FILTER_SKIP } = require('./tools');
 const DEFAULT_MAX_KEYS = 10000;
 
 /**
@@ -91,7 +91,7 @@ class List extends Extension {
      * < 0 : listing done
      */
     filter(elem) {
-        // Check if the result array is full
+        // Check first in case of maxkeys <= 0
         if (this.keys >= this.maxKeys) {
             return FILTER_END;
         }
@@ -99,7 +99,7 @@ class List extends Extension {
             this.filterKeyStartsWith !== undefined) &&
             typeof elem === 'object' &&
             !this.customFilter(elem.value)) {
-            return FILTER_ACCEPT;
+            return FILTER_SKIP;
         }
         if (typeof elem === 'object') {
             this.res.push({
lib/algos/list/delimiter.js (added on hotfix/7.7; path inferred from the index.ts require above)
@@ -0,0 +1,274 @@
'use strict'; // eslint-disable-line strict

const Extension = require('./Extension').default;
const { inc, listingParamsMasterKeysV0ToV1,
    FILTER_END, FILTER_ACCEPT, FILTER_SKIP } = require('./tools');
const VSConst = require('../../versioning/constants').VersioningConstants;
const { DbPrefixes, BucketVersioningKeyFormat } = VSConst;

/**
 * Find the common prefix in the path
 *
 * @param {String} key - path of the object
 * @param {String} delimiter - separator
 * @param {Number} delimiterIndex - 'folder' index in the path
 * @return {String} - CommonPrefix
 */
function getCommonPrefix(key, delimiter, delimiterIndex) {
    return key.substring(0, delimiterIndex + delimiter.length);
}

/**
 * Handle object listing with parameters
 *
 * @prop {String[]} CommonPrefixes - 'folders' defined by the delimiter
 * @prop {String[]} Contents - 'files' to list
 * @prop {Boolean} IsTruncated - truncated listing flag
 * @prop {String|undefined} NextMarker - marker per amazon format
 * @prop {Number} keys - count of listed keys
 * @prop {String|undefined} delimiter - separator per amazon format
 * @prop {String|undefined} prefix - prefix per amazon format
 * @prop {Number} maxKeys - number of keys to list
 */
class Delimiter extends Extension {
    /**
     * Create a new Delimiter instance
     * @constructor
     * @param {Object} parameters - listing parameters
     * @param {String} [parameters.delimiter] - delimiter per amazon format
     * @param {String} [parameters.prefix] - prefix per amazon format
     * @param {String} [parameters.marker] - marker per amazon format
     * @param {Number} [parameters.maxKeys] - number of keys to list
     * @param {Boolean} [parameters.v2] - indicates whether v2 format
     * @param {String} [parameters.startAfter] - marker per amazon format
     * @param {String} [parameters.continuationToken] - obfuscated amazon token
     * @param {Boolean} [parameters.alphabeticalOrder] - Either the result is
     *   alphabetically ordered or not
     * @param {RequestLogger} logger - The logger of the request
     * @param {String} [vFormat] - versioning key format
     */
    constructor(parameters, logger, vFormat) {
        super(parameters, logger);
        // original listing parameters
        this.delimiter = parameters.delimiter;
        this.prefix = parameters.prefix;
        this.marker = parameters.marker;
        this.maxKeys = parameters.maxKeys || 1000;
        this.startAfter = parameters.startAfter;
        this.continuationToken = parameters.continuationToken;
        this.alphabeticalOrder =
            typeof parameters.alphabeticalOrder !== 'undefined' ?
                parameters.alphabeticalOrder : true;

        this.vFormat = vFormat || BucketVersioningKeyFormat.v0;
        // results
        this.CommonPrefixes = [];
        this.Contents = [];
        this.IsTruncated = false;
        this.NextMarker = parameters.marker;
        this.NextContinuationToken =
            parameters.continuationToken || parameters.startAfter;

        this.startMarker = parameters.v2 ? 'startAfter' : 'marker';
        this.continueMarker = parameters.v2 ? 'continuationToken' : 'marker';
        this.nextContinueMarker = parameters.v2 ?
            'NextContinuationToken' : 'NextMarker';

        if (this.delimiter !== undefined &&
            this[this.nextContinueMarker] !== undefined &&
            this[this.nextContinueMarker].startsWith(this.prefix || '')) {
            const nextDelimiterIndex =
                this[this.nextContinueMarker].indexOf(this.delimiter,
                    this.prefix ? this.prefix.length : 0);
            this[this.nextContinueMarker] =
                this[this.nextContinueMarker].slice(0, nextDelimiterIndex +
                    this.delimiter.length);
        }

        Object.assign(this, {
            [BucketVersioningKeyFormat.v0]: {
                genMDParams: this.genMDParamsV0,
                getObjectKey: this.getObjectKeyV0,
                skipping: this.skippingV0,
            },
            [BucketVersioningKeyFormat.v1]: {
                genMDParams: this.genMDParamsV1,
                getObjectKey: this.getObjectKeyV1,
                skipping: this.skippingV1,
            },
        }[this.vFormat]);
    }

    genMDParamsV0() {
        const params = {};
        if (this.prefix) {
            params.gte = this.prefix;
            params.lt = inc(this.prefix);
        }
        const startVal = this[this.continueMarker] || this[this.startMarker];
        if (startVal) {
            if (params.gte && params.gte > startVal) {
                return params;
            }
            delete params.gte;
            params.gt = startVal;
        }
        return params;
    }

    genMDParamsV1() {
        const params = this.genMDParamsV0();
        return listingParamsMasterKeysV0ToV1(params);
    }

    /**
     * check if the max keys count has been reached and set the
     * final state of the result if it is the case
     * @return {Boolean} - indicates if the iteration has to stop
     */
    _reachedMaxKeys() {
        if (this.keys >= this.maxKeys) {
            // In cases of maxKeys <= 0 -> IsTruncated = false
            this.IsTruncated = this.maxKeys > 0;
            return true;
        }
        return false;
    }

    /**
     * Add a (key, value) tuple to the listing
     * Set the NextMarker to the current key
     * Increment the keys counter
     * @param {String} key - The key to add
     * @param {String} value - The value of the key
     * @return {number} - indicates if iteration should continue
     */
    addContents(key, value) {
        if (this._reachedMaxKeys()) {
            return FILTER_END;
        }
        this.Contents.push({ key, value: this.trimMetadata(value) });
        this[this.nextContinueMarker] = key;
        ++this.keys;
        return FILTER_ACCEPT;
    }

    getObjectKeyV0(obj) {
        return obj.key;
    }

    getObjectKeyV1(obj) {
        return obj.key.slice(DbPrefixes.Master.length);
    }

    /**
     * Filter to apply on each iteration, based on:
     * - prefix
     * - delimiter
     * - maxKeys
     * The marker is being handled directly by levelDB
     * @param {Object} obj - The key and value of the element
     * @param {String} obj.key - The key of the element
     * @param {String} obj.value - The value of the element
     * @return {number} - indicates if iteration should continue
     */
    filter(obj) {
        const key = this.getObjectKey(obj);
        const value = obj.value;
        if ((this.prefix && !key.startsWith(this.prefix))
            || (this.alphabeticalOrder
                && typeof this[this.nextContinueMarker] === 'string'
                && key <= this[this.nextContinueMarker])) {
            return FILTER_SKIP;
        }
        if (this.delimiter) {
            const baseIndex = this.prefix ? this.prefix.length : 0;
            const delimiterIndex = key.indexOf(this.delimiter, baseIndex);
            if (delimiterIndex === -1) {
                return this.addContents(key, value);
            }
            return this.addCommonPrefix(key, delimiterIndex);
        }
        return this.addContents(key, value);
    }

    /**
     * Add a Common Prefix in the list
     * @param {String} key - object name
     * @param {Number} index - after prefix starting point
     * @return {Boolean} - indicates if iteration should continue
     */
    addCommonPrefix(key, index) {
        const commonPrefix = getCommonPrefix(key, this.delimiter, index);
        if (this.CommonPrefixes.indexOf(commonPrefix) === -1
                && this[this.nextContinueMarker] !== commonPrefix) {
            if (this._reachedMaxKeys()) {
                return FILTER_END;
            }
            this.CommonPrefixes.push(commonPrefix);
            this[this.nextContinueMarker] = commonPrefix;
            ++this.keys;
            return FILTER_ACCEPT;
        }
        return FILTER_SKIP;
    }

    /**
     * If repd happens to want to skip listing on a bucket in v0
     * versioning key format, here is an idea.
     *
     * @return {string} - the present range (NextMarker) if repd believes
     *   that it's enough and should move on
     */
    skippingV0() {
        return this[this.nextContinueMarker];
    }

    /**
     * If repd happens to want to skip listing on a bucket in v1
     * versioning key format, here is an idea.
     *
     * @return {string} - the present range (NextMarker) if repd believes
     *   that it's enough and should move on
     */
    skippingV1() {
        return DbPrefixes.Master + this[this.nextContinueMarker];
    }

    /**
     * Return an object containing all mandatory fields to use once the
     * iteration is done, doesn't show a NextMarker field if the output
     * isn't truncated
     * @return {Object} - following amazon format
     */
    result() {
        /* NextMarker is only provided when delimiter is used.
         * specified in v1 listing documentation
         * http://docs.aws.amazon.com/AmazonS3/latest/API/RESTBucketGET.html
         */
        const result = {
            CommonPrefixes: this.CommonPrefixes,
            Contents: this.Contents,
            IsTruncated: this.IsTruncated,
            Delimiter: this.delimiter,
        };
        if (this.parameters.v2) {
            result.NextContinuationToken = this.IsTruncated
                ? this.NextContinuationToken : undefined;
        } else {
            result.NextMarker = (this.IsTruncated && this.delimiter)
                ? this.NextMarker : undefined;
        }
        return result;
    }
}

module.exports = { Delimiter };
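A usage sketch of the re-added Delimiter, driving its filter() by hand over a sorted key list the way a metadata backend would (the logger stub and bucket keys are illustrative; the Extension base class is assumed to initialize the keys counter):

import { Delimiter } from './lib/algos/list/delimiter';

const logger: any = { info: () => {}, error: () => {} }; // stand-in RequestLogger
const listing = new Delimiter(
    { delimiter: '/', prefix: 'photos/', maxKeys: 1000 }, logger);

const keys = ['photos/2021/a.jpg', 'photos/2021/b.jpg', 'photos/cover.jpg'];
for (const key of keys) {
    listing.filter({ key, value: '{}' });
}
console.log(listing.result());
// CommonPrefixes: ['photos/2021/'],
// Contents: [{ key: 'photos/cover.jpg', value: '{}' }], IsTruncated: false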
@ -1,356 +0,0 @@
|
||||||
'use strict'; // eslint-disable-line strict
|
|
||||||
|
|
||||||
const Extension = require('./Extension').default;
|
|
||||||
const { inc, listingParamsMasterKeysV0ToV1,
|
|
||||||
FILTER_END, FILTER_ACCEPT, FILTER_SKIP, SKIP_NONE } = require('./tools');
|
|
||||||
const VSConst = require('../../versioning/constants').VersioningConstants;
const { DbPrefixes, BucketVersioningKeyFormat } = VSConst;

export interface FilterState {
    id: number,
};

export interface FilterReturnValue {
    FILTER_ACCEPT,
    FILTER_SKIP,
    FILTER_END,
};

export const enum DelimiterFilterStateId {
    NotSkipping = 1,
    SkippingPrefix = 2,
};

export interface DelimiterFilterState_NotSkipping extends FilterState {
    id: DelimiterFilterStateId.NotSkipping,
};

export interface DelimiterFilterState_SkippingPrefix extends FilterState {
    id: DelimiterFilterStateId.SkippingPrefix,
    prefix: string;
};

type KeyHandler = (key: string, value: string) => FilterReturnValue;

export type ResultObject = {
    CommonPrefixes: string[];
    Contents: {
        key: string;
        value: string;
    }[];
    IsTruncated: boolean;
    Delimiter ?: string;
    NextMarker ?: string;
    NextContinuationToken ?: string;
};

/**
 * Handle object listing with parameters
 *
 * @prop {String[]} CommonPrefixes - 'folders' defined by the delimiter
 * @prop {String[]} Contents - 'files' to list
 * @prop {Boolean} IsTruncated - truncated listing flag
 * @prop {String|undefined} NextMarker - marker per amazon format
 * @prop {Number} keys - count of listed keys
 * @prop {String|undefined} delimiter - separator per amazon format
 * @prop {String|undefined} prefix - prefix per amazon format
 * @prop {Number} maxKeys - number of keys to list
 */
export class Delimiter extends Extension {

    state: FilterState;
    keyHandlers: { [id: number]: KeyHandler };

    /**
     * Create a new Delimiter instance
     * @constructor
     * @param {Object} parameters - listing parameters
     * @param {String} [parameters.delimiter] - delimiter per amazon format
     * @param {String} [parameters.prefix] - prefix per amazon format
     * @param {String} [parameters.marker] - marker per amazon format
     * @param {Number} [parameters.maxKeys] - number of keys to list
     * @param {Boolean} [parameters.v2] - indicates whether v2 format
     * @param {String} [parameters.startAfter] - marker per amazon format
     * @param {String} [parameters.continuationToken] - obfuscated amazon token
     * @param {RequestLogger} logger - The logger of the request
     * @param {String} [vFormat] - versioning key format
     */
    constructor(parameters, logger, vFormat) {
        super(parameters, logger);
        // original listing parameters
        this.delimiter = parameters.delimiter;
        this.prefix = parameters.prefix;
        this.maxKeys = parameters.maxKeys || 1000;

        if (parameters.v2) {
            this.marker = parameters.continuationToken || parameters.startAfter;
        } else {
            this.marker = parameters.marker;
        }
        this.nextMarker = this.marker;

        this.vFormat = vFormat || BucketVersioningKeyFormat.v0;
        // results
        this.CommonPrefixes = [];
        this.Contents = [];
        this.IsTruncated = false;
        this.keyHandlers = {};

        Object.assign(this, {
            [BucketVersioningKeyFormat.v0]: {
                genMDParams: this.genMDParamsV0,
                getObjectKey: this.getObjectKeyV0,
                skipping: this.skippingV0,
            },
            [BucketVersioningKeyFormat.v1]: {
                genMDParams: this.genMDParamsV1,
                getObjectKey: this.getObjectKeyV1,
                skipping: this.skippingV1,
            },
        }[this.vFormat]);

        // if there is a delimiter, we may skip ranges by prefix,
        // hence using the NotSkippingPrefix flavor that checks the
        // subprefix up to the delimiter for the NotSkipping state
        if (this.delimiter) {
            this.setKeyHandler(
                DelimiterFilterStateId.NotSkipping,
                this.keyHandler_NotSkippingPrefix.bind(this));
        } else {
            // listing without a delimiter never has to skip over any
            // prefix -> use the NeverSkipping flavor for the
            // NotSkipping state
            this.setKeyHandler(
                DelimiterFilterStateId.NotSkipping,
                this.keyHandler_NeverSkipping.bind(this));
        }
        this.setKeyHandler(
            DelimiterFilterStateId.SkippingPrefix,
            this.keyHandler_SkippingPrefix.bind(this));

        this.state = <DelimiterFilterState_NotSkipping> {
            id: DelimiterFilterStateId.NotSkipping,
        };
    }

    genMDParamsV0() {
        const params: { gt ?: string, gte ?: string, lt ?: string } = {};
        if (this.prefix) {
            params.gte = this.prefix;
            params.lt = inc(this.prefix);
        }
        if (this.marker && this.delimiter) {
            const commonPrefix = this.getCommonPrefix(this.marker);
            if (commonPrefix) {
                const afterPrefix = inc(commonPrefix);
                if (!params.gte || afterPrefix > params.gte) {
                    params.gte = afterPrefix;
                }
            }
        }
        if (this.marker && (!params.gte || this.marker >= params.gte)) {
            delete params.gte;
            params.gt = this.marker;
        }
        return params;
    }
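    /* Worked example (added note, not part of the original file): with
     * prefix 'photos/', delimiter '/' and marker 'photos/march/cat.jpg',
     * genMDParamsV0() yields { gte: 'photos/march0', lt: 'photos0' },
     * since inc() bumps the last character ('/' -> '0'). The scan thus
     * resumes right after the 'photos/march/' common prefix already
     * reported for the marker, instead of rescanning it key by key. */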

    genMDParamsV1() {
        const params = this.genMDParamsV0();
        return listingParamsMasterKeysV0ToV1(params);
    }

    /**
     * check if the max keys count has been reached and set the
     * final state of the result if it is the case
     * @return {Boolean} - indicates if the iteration has to stop
     */
    _reachedMaxKeys(): boolean {
        if (this.keys >= this.maxKeys) {
            // In cases of maxKeys <= 0 -> IsTruncated = false
            this.IsTruncated = this.maxKeys > 0;
            return true;
        }
        return false;
    }

    /**
     * Add a (key, value) tuple to the listing
     * Set the NextMarker to the current key
     * Increment the keys counter
     * @param {String} key - The key to add
     * @param {String} value - The value of the key
     * @return {undefined}
     */
    addContents(key: string, value: string): void {
        this.Contents.push({ key, value: this.trimMetadata(value) });
        ++this.keys;
        this.nextMarker = key;
    }

    getCommonPrefix(key: string): string | undefined {
        if (!this.delimiter) {
            return undefined;
        }
        const baseIndex = this.prefix ? this.prefix.length : 0;
        const delimiterIndex = key.indexOf(this.delimiter, baseIndex);
        if (delimiterIndex === -1) {
            return undefined;
        }
        return key.substring(0, delimiterIndex + this.delimiter.length);
    }

    /**
     * Add a Common Prefix in the list
     * @param {String} commonPrefix - common prefix to add
     * @param {String} key - full key starting with commonPrefix
     * @return {undefined}
     */
    addCommonPrefix(commonPrefix: string, key: string): void {
        // add the new prefix to the list
        this.CommonPrefixes.push(commonPrefix);
        ++this.keys;
        this.nextMarker = commonPrefix;
    }

    addCommonPrefixOrContents(key: string, value: string): string | undefined {
        // add the subprefix to the common prefixes if the key has the delimiter
        const commonPrefix = this.getCommonPrefix(key);
        if (commonPrefix) {
            this.addCommonPrefix(commonPrefix, key);
            return commonPrefix;
        }
        this.addContents(key, value);
        return undefined;
    }

    getObjectKeyV0(obj: { key: string }): string {
        return obj.key;
    }

    getObjectKeyV1(obj: { key: string }): string {
        return obj.key.slice(DbPrefixes.Master.length);
    }

    /**
     * Filter to apply on each iteration, based on:
     * - prefix
     * - delimiter
     * - maxKeys
     * The marker is being handled directly by levelDB
     * @param {Object} obj - The key and value of the element
     * @param {String} obj.key - The key of the element
     * @param {String} obj.value - The value of the element
     * @return {number} - indicates if iteration should continue
     */
    filter(obj: { key: string, value: string }): FilterReturnValue {
        const key = this.getObjectKey(obj);
        const value = obj.value;

        return this.handleKey(key, value);
    }

    setState(state: FilterState): void {
        this.state = state;
    }

    setKeyHandler(stateId: number, keyHandler: KeyHandler): void {
        this.keyHandlers[stateId] = keyHandler;
    }

    handleKey(key: string, value: string): FilterReturnValue {
        return this.keyHandlers[this.state.id](key, value);
    }

    keyHandler_NeverSkipping(key: string, value: string): FilterReturnValue {
        if (this._reachedMaxKeys()) {
            return FILTER_END;
        }
        this.addContents(key, value);
        return FILTER_ACCEPT;
    }

    keyHandler_NotSkippingPrefix(key: string, value: string): FilterReturnValue {
        if (this._reachedMaxKeys()) {
            return FILTER_END;
        }
        const commonPrefix = this.addCommonPrefixOrContents(key, value);
        if (commonPrefix) {
            // transition into SkippingPrefix state to skip all following keys
            // while they start with the same prefix
            this.setState(<DelimiterFilterState_SkippingPrefix> {
                id: DelimiterFilterStateId.SkippingPrefix,
                prefix: commonPrefix,
            });
        }
        return FILTER_ACCEPT;
    }

    keyHandler_SkippingPrefix(key: string, value: string): FilterReturnValue {
        const { prefix } = <DelimiterFilterState_SkippingPrefix> this.state;
        if (key.startsWith(prefix)) {
            return FILTER_SKIP;
        }
        this.setState(<DelimiterFilterState_NotSkipping> {
            id: DelimiterFilterStateId.NotSkipping,
        });
        return this.handleKey(key, value);
    }

    skippingBase(): string | undefined {
        switch (this.state.id) {
        case DelimiterFilterStateId.SkippingPrefix:
            const { prefix } = <DelimiterFilterState_SkippingPrefix> this.state;
            return inc(prefix);

        default:
            return SKIP_NONE;
        }
    }

    skippingV0() {
        return this.skippingBase();
    }

    skippingV1() {
        const skipTo = this.skippingBase();
        if (skipTo === SKIP_NONE) {
            return SKIP_NONE;
        }
        return DbPrefixes.Master + skipTo;
    }

    /**
     * Return an object containing all mandatory fields to use once the
     * iteration is done; doesn't show a NextMarker field if the output
     * isn't truncated
     * @return {Object} - following amazon format
     */
    result(): ResultObject {
        /* NextMarker is only provided when delimiter is used,
         * as specified in v1 listing documentation
         * http://docs.aws.amazon.com/AmazonS3/latest/API/RESTBucketGET.html
         */
        const result: ResultObject = {
            CommonPrefixes: this.CommonPrefixes,
            Contents: this.Contents,
            IsTruncated: this.IsTruncated,
            Delimiter: this.delimiter,
        };
        if (this.parameters.v2) {
            result.NextContinuationToken = this.IsTruncated
                ? this.nextMarker : undefined;
        } else {
            result.NextMarker = (this.IsTruncated && this.delimiter)
                ? this.nextMarker : undefined;
        }
        return result;
    }
}
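A minimal usage sketch of the class above (an illustration, not part of the diff; the entry values and the use of console as a stand-in logger are assumptions):

const { Delimiter } = require('./delimiter');
const { FILTER_END } = require('./tools');

const lister = new Delimiter({ delimiter: '/', maxKeys: 2 }, console);
const entries = [
    { key: 'a.txt', value: '{}' },          // listed in Contents
    { key: 'docs/readme.md', value: '{}' }, // folded into CommonPrefixes as 'docs/'
    { key: 'z.txt', value: '{}' },          // hits maxKeys, ends the iteration
];
for (const entry of entries) {
    if (lister.filter(entry) === FILTER_END) {
        break;
    }
}
// => { CommonPrefixes: ['docs/'], Contents: [{ key: 'a.txt', value: '{}' }],
//      IsTruncated: true, Delimiter: '/', NextMarker: 'docs/' }
console.log(lister.result());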
@ -1,127 +0,0 @@
const { DelimiterMaster } = require('./delimiterMaster');
const { FILTER_ACCEPT, FILTER_END } = require('./tools');

type ResultObject = {
    Contents: {
        key: string;
        value: string;
    }[];
    IsTruncated: boolean;
    NextMarker ?: string;
};

/**
 * Handle object listing with parameters. This extends the base class DelimiterMaster
 * to return the master/current versions.
 */
class DelimiterCurrent extends DelimiterMaster {
    /**
     * Delimiter listing of current versions.
     * @param {Object} parameters - listing parameters
     * @param {String} parameters.beforeDate - limit the response to keys older than beforeDate
     * @param {String} parameters.excludedDataStoreName - excluded datastore name
     * @param {Number} parameters.maxScannedLifecycleListingEntries - max number of entries to be scanned
     * @param {RequestLogger} logger - The logger of the request
     * @param {String} [vFormat] - versioning key format
     */
    constructor(parameters, logger, vFormat) {
        super(parameters, logger, vFormat);

        this.beforeDate = parameters.beforeDate;
        this.excludedDataStoreName = parameters.excludedDataStoreName;
        this.maxScannedLifecycleListingEntries = parameters.maxScannedLifecycleListingEntries;
        this.scannedKeys = 0;
    }

    genMDParamsV0() {
        const params = super.genMDParamsV0();
        // lastModified and dataStoreName parameters are used by metadata that enables built-in filtering,
        // a feature currently exclusive to MongoDB
        if (this.beforeDate) {
            params.lastModified = {
                lt: this.beforeDate,
            };
        }

        if (this.excludedDataStoreName) {
            params.dataStoreName = {
                ne: this.excludedDataStoreName,
            };
        }

        return params;
    }

    /**
     * Parses the stringified entry's value.
     * @param {string} s - stringified value
     * @return {object|undefined} - undefined if parsing fails, otherwise the parsed value.
     */
    _parse(s) {
        let p;
        try {
            p = JSON.parse(s);
        } catch (e: any) {
            this.logger.warn(
                'Could not parse Object Metadata while listing',
                { err: e.toString() });
        }
        return p;
    }

    /**
     * check if the max keys count has been reached and set the
     * final state of the result if it is the case
     *
     * specialized implementation on DelimiterCurrent to also check
     * the number of scanned keys
     *
     * @return {Boolean} - indicates if the iteration has to stop
     */
    _reachedMaxKeys(): boolean {
        if (this.maxScannedLifecycleListingEntries && this.scannedKeys >= this.maxScannedLifecycleListingEntries) {
            this.IsTruncated = true;
            this.logger.info('listing stopped due to reaching the maximum scanned entries limit',
                {
                    maxScannedLifecycleListingEntries: this.maxScannedLifecycleListingEntries,
                    scannedKeys: this.scannedKeys,
                });
            return true;
        }
        return super._reachedMaxKeys();
    }

    addContents(key, value) {
        ++this.scannedKeys;
        const parsedValue = this._parse(value);
        // if parsing fails, skip the key.
        if (parsedValue) {
            const lastModified = parsedValue['last-modified'];
            const dataStoreName = parsedValue.dataStoreName;
            // We then check that the current version is older than "beforeDate" and that
            // "excludedDataStoreName" is either not specified or differs from the data store name.
            if ((!this.beforeDate || (lastModified && lastModified < this.beforeDate)) &&
                (!this.excludedDataStoreName || dataStoreName !== this.excludedDataStoreName)) {
                super.addContents(key, value);
            }
            // In the event of a timeout occurring before any content is added,
            // NextMarker is updated even if the object is not eligible.
            // It minimizes the amount of data that the client needs to re-process if the request times out.
            this.nextMarker = key;
        }
    }

    result(): object {
        const result: ResultObject = {
            Contents: this.Contents,
            IsTruncated: this.IsTruncated,
        };

        if (this.IsTruncated) {
            result.NextMarker = this.nextMarker;
        }

        return result;
    }
}
module.exports = { DelimiterCurrent };
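A hedged sketch of how a lifecycle caller might drive this class (illustrative values only; the MongoDB-only filter pushdown noted in genMDParamsV0() is assumed to be available):

const { DelimiterCurrent } = require('./delimiterCurrent');

const current = new DelimiterCurrent({
    beforeDate: '2024-01-01T00:00:00.000Z',
    excludedDataStoreName: 'us-east-2',
    maxScannedLifecycleListingEntries: 10000,
}, console);
// => includes { lastModified: { lt: '2024-01-01T00:00:00.000Z' },
//               dataStoreName: { ne: 'us-east-2' } }
console.log(current.genMDParamsV0());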
@ -0,0 +1,196 @@
'use strict'; // eslint-disable-line strict

const Delimiter = require('./delimiter').Delimiter;
const Version = require('../../versioning/Version').Version;
const VSConst = require('../../versioning/constants').VersioningConstants;
const { BucketVersioningKeyFormat } = VSConst;
const { FILTER_ACCEPT, FILTER_SKIP, SKIP_NONE } = require('./tools');

const VID_SEP = VSConst.VersionId.Separator;
const { DbPrefixes } = VSConst;

/**
 * Handle object listing with parameters. This extends the base class Delimiter
 * to return the raw master versions of existing objects.
 */
class DelimiterMaster extends Delimiter {
    /**
     * Delimiter listing of master versions.
     * @param {Object} parameters - listing parameters
     * @param {String} parameters.delimiter - delimiter per amazon format
     * @param {String} parameters.prefix - prefix per amazon format
     * @param {String} parameters.marker - marker per amazon format
     * @param {Number} parameters.maxKeys - number of keys to list
     * @param {Boolean} parameters.v2 - indicates whether v2 format
     * @param {String} parameters.startAfter - marker per amazon v2 format
     * @param {String} parameters.continuationToken - obfuscated amazon token
     * @param {RequestLogger} logger - The logger of the request
     * @param {String} [vFormat] - versioning key format
     */
    constructor(parameters, logger, vFormat) {
        super(parameters, logger, vFormat);
        // non-PHD master version or a version whose master is a PHD version
        this.prvKey = undefined;
        this.prvPHDKey = undefined;
        this.inReplayPrefix = false;

        Object.assign(this, {
            [BucketVersioningKeyFormat.v0]: {
                filter: this.filterV0,
                skipping: this.skippingV0,
            },
            [BucketVersioningKeyFormat.v1]: {
                filter: this.filterV1,
                skipping: this.skippingV1,
            },
        }[this.vFormat]);
    }

    /**
     * Filter to apply on each iteration for buckets in v0 format,
     * based on:
     * - prefix
     * - delimiter
     * - maxKeys
     * The marker is being handled directly by levelDB
     * @param {Object} obj - The key and value of the element
     * @param {String} obj.key - The key of the element
     * @param {String} obj.value - The value of the element
     * @return {number} - indicates if iteration should continue
     */
    filterV0(obj) {
        let key = obj.key;
        const value = obj.value;

        if (key.startsWith(DbPrefixes.Replay)) {
            this.inReplayPrefix = true;
            return FILTER_SKIP;
        }
        this.inReplayPrefix = false;

        /* Skip keys not starting with the prefix or not alphabetically
         * ordered. */
        if ((this.prefix && !key.startsWith(this.prefix))
                || (typeof this[this.nextContinueMarker] === 'string' &&
                    key <= this[this.nextContinueMarker])) {
            return FILTER_SKIP;
        }

        /* Skip version keys (<key><versionIdSeparator><version>) if we already
         * have a master version. */
        const versionIdIndex = key.indexOf(VID_SEP);
        if (versionIdIndex >= 0) {
            key = key.slice(0, versionIdIndex);
            /* - key === this.prvKey is triggered when a master version has
             *   been accepted for this key,
             * - key === this.NextMarker or this.NextContinueToken is triggered
             *   when a listing page ends on an accepted obj and the next page
             *   starts with a version of this object.
             *   In that case prvKey is default set to undefined
             *   in the constructor and comparing to NextMarker is the only
             *   way to know we should not accept this version. This test is
             *   not redundant with the one at the beginning of this function,
             *   we are comparing here the key without the version suffix,
             * - key startsWith the previous NextMarker happens because we set
             *   NextMarker to the common prefix instead of the whole key
             *   value. (TODO: remove this test once ZENKO-1048 is fixed)
             */
            if (key === this.prvKey || key === this[this.nextContinueMarker] ||
                (this.delimiter &&
                    key.startsWith(this[this.nextContinueMarker]))) {
                /* master version already filtered */
                return FILTER_SKIP;
            }
        }
        if (Version.isPHD(value)) {
            /* master version is a PHD version, we want to wait for the next
             * one:
             * - Set the prvKey to undefined to not skip the next version,
             * - return ACCEPT so that callers do not skip the next values in
             *   range (skip scan mechanism in metadata backends like Metadata
             *   or MongoClient). */
            this.prvKey = undefined;
            this.prvPHDKey = key;
            return FILTER_ACCEPT;
        }
        if (Version.isDeleteMarker(value)) {
            /* This entry is a deleteMarker which has not been filtered by the
             * version test. Either:
             * - it is a deleteMarker on the master version, we want to SKIP
             *   all the following entries with this key (no master version),
             * - or a deleteMarker following a PHD (setting prvKey to undefined
             *   when an entry is a PHD avoids the skip on version for the
             *   next entry). In that case we expect the master version to
             *   follow. */
            if (key === this.prvPHDKey) {
                this.prvKey = undefined;
                return FILTER_ACCEPT;
            }
            this.prvKey = key;
            return FILTER_SKIP;
        }

        this.prvKey = key;
        if (this.delimiter) {
            // check if the key has the delimiter
            const baseIndex = this.prefix ? this.prefix.length : 0;
            const delimiterIndex = key.indexOf(this.delimiter, baseIndex);
            if (delimiterIndex >= 0) {
                // try to add the prefix to the list
                return this.addCommonPrefix(key, delimiterIndex);
            }
        }
        return this.addContents(key, value);
    }

    /**
     * Filter to apply on each iteration for buckets in v1 format,
     * based on:
     * - prefix
     * - delimiter
     * - maxKeys
     * The marker is being handled directly by levelDB
     * @param {Object} obj - The key and value of the element
     * @param {String} obj.key - The key of the element
     * @param {String} obj.value - The value of the element
     * @return {number} - indicates if iteration should continue
     */
    filterV1(obj) {
        // Filtering master keys in v1 is simply listing the master
        // keys, as the state of version keys does not change the
        // result, so we can use the Delimiter method directly.
        return super.filter(obj);
    }

    skippingBase() {
        if (this[this.nextContinueMarker]) {
            // next marker or next continuation token:
            // - foo/ : skipping foo/
            // - foo  : skipping foo.
            const index = this[this.nextContinueMarker].lastIndexOf(this.delimiter);
            if (index === this[this.nextContinueMarker].length - 1) {
                return this[this.nextContinueMarker];
            }
            return this[this.nextContinueMarker] + VID_SEP;
        }
        return SKIP_NONE;
    }

    skippingV0() {
        if (this.inReplayPrefix) {
            return DbPrefixes.Replay;
        }
        return this.skippingBase();
    }

    skippingV1() {
        const skipTo = this.skippingBase();
        if (skipTo === SKIP_NONE) {
            return SKIP_NONE;
        }
        return DbPrefixes.Master + skipTo;
    }
}

module.exports = { DelimiterMaster };
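An illustrative view (added note, not part of the diff) of the v0 key layout that filterV0() above iterates over; the version ids are shortened fakes:

//   'fruit/apple'               master version  -> accepted, listed
//   'fruit/apple\u0000v1'       version key     -> skipped (master already listed)
//   'fruit/banana'              PHD placeholder -> accepted, wait for next version
//   'fruit/banana\u0000v2'      first version   -> becomes the listed master
//
// skippingV0() then returns '<nextMarker>' + VID_SEP so the backend's
// skip-scan can jump past that key's remaining version entries in one seek.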
@ -1,620 +0,0 @@
import {
    Delimiter,
    FilterState,
    FilterReturnValue,
    DelimiterFilterStateId,
    DelimiterFilterState_NotSkipping,
    DelimiterFilterState_SkippingPrefix,
    ResultObject,
} from './delimiter';
const Version = require('../../versioning/Version').Version;
const VSConst = require('../../versioning/constants').VersioningConstants;
const { BucketVersioningKeyFormat } = VSConst;
const { FILTER_ACCEPT, FILTER_SKIP, FILTER_END, SKIP_NONE, inc } = require('./tools');

import { GapSetEntry } from '../cache/GapSet';
import { GapCacheInterface } from '../cache/GapCache';

const VID_SEP = VSConst.VersionId.Separator;
const { DbPrefixes } = VSConst;

export const enum DelimiterMasterFilterStateId {
    SkippingVersionsV0 = 101,
    WaitVersionAfterPHDV0 = 102,
    SkippingGapV0 = 103,
};

interface DelimiterMasterFilterState_SkippingVersionsV0 extends FilterState {
    id: DelimiterMasterFilterStateId.SkippingVersionsV0,
    masterKey: string,
};

interface DelimiterMasterFilterState_WaitVersionAfterPHDV0 extends FilterState {
    id: DelimiterMasterFilterStateId.WaitVersionAfterPHDV0,
    masterKey: string,
};

interface DelimiterMasterFilterState_SkippingGapV0 extends FilterState {
    id: DelimiterMasterFilterStateId.SkippingGapV0,
};

export const enum GapCachingState {
    NoGapCache = 0,          // there is no gap cache
    UnknownGap = 1,          // waiting for a cache lookup
    GapLookupInProgress = 2, // asynchronous gap lookup in progress
    GapCached = 3,           // an upcoming or already skippable gap is cached
    NoMoreGap = 4,           // the cache doesn't have any more gaps inside the listed range
};

type GapCachingInfo_NoGapCache = {
    state: GapCachingState.NoGapCache;
};

type GapCachingInfo_NoCachedGap = {
    state: GapCachingState.UnknownGap
        | GapCachingState.GapLookupInProgress;
    gapCache: GapCacheInterface;
};

type GapCachingInfo_GapCached = {
    state: GapCachingState.GapCached;
    gapCache: GapCacheInterface;
    gapCached: GapSetEntry;
};

type GapCachingInfo_NoMoreGap = {
    state: GapCachingState.NoMoreGap;
};

type GapCachingInfo = GapCachingInfo_NoGapCache
    | GapCachingInfo_NoCachedGap
    | GapCachingInfo_GapCached
    | GapCachingInfo_NoMoreGap;


export const enum GapBuildingState {
    Disabled = 0,    // no gap cache or no gap building needed (e.g. in V1 versioning format)
    NotBuilding = 1, // not currently building a gap (i.e. not listing within a gap)
    Building = 2,    // currently building a gap (i.e. listing within a gap)
    Expired = 3,     // not allowed to build due to exposure delay timeout
};

type GapBuildingInfo_NothingToBuild = {
    state: GapBuildingState.Disabled | GapBuildingState.Expired;
};

type GapBuildingParams = {
    /**
     * minimum weight for a gap to be created in the cache
     */
    minGapWeight: number;
    /**
     * trigger a cache setGap() call every N skippable keys
     */
    triggerSaveGapWeight: number;
    /**
     * timestamp to assess whether we're still inside the validity period to
     * be allowed to build gaps
     */
    initTimestamp: number;
};

type GapBuildingInfo_NotBuilding = {
    state: GapBuildingState.NotBuilding;
    gapCache: GapCacheInterface;
    params: GapBuildingParams;
};

type GapBuildingInfo_Building = {
    state: GapBuildingState.Building;
    gapCache: GapCacheInterface;
    params: GapBuildingParams;
    /**
     * Gap currently being created
     */
    gap: GapSetEntry;
    /**
     * total current weight of the gap being created
     */
    gapWeight: number;
};

type GapBuildingInfo = GapBuildingInfo_NothingToBuild
    | GapBuildingInfo_NotBuilding
    | GapBuildingInfo_Building;

/**
 * Handle object listing with parameters. This extends the base class Delimiter
 * to return the raw master versions of existing objects.
 */
export class DelimiterMaster extends Delimiter {

    _gapCaching: GapCachingInfo;
    _gapBuilding: GapBuildingInfo;
    _refreshedBuildingParams: GapBuildingParams | null;

    /**
     * Delimiter listing of master versions.
     * @param {Object} parameters - listing parameters
     * @param {String} [parameters.delimiter] - delimiter per amazon format
     * @param {String} [parameters.prefix] - prefix per amazon format
     * @param {String} [parameters.marker] - marker per amazon format
     * @param {Number} [parameters.maxKeys] - number of keys to list
     * @param {Boolean} [parameters.v2] - indicates whether v2 format
     * @param {String} [parameters.startAfter] - marker per amazon v2 format
     * @param {String} [parameters.continuationToken] - obfuscated amazon token
     * @param {RequestLogger} logger - The logger of the request
     * @param {String} [vFormat="v0"] - versioning key format
     */
    constructor(parameters, logger, vFormat?: string) {
        super(parameters, logger, vFormat);

        if (this.vFormat === BucketVersioningKeyFormat.v0) {
            // override Delimiter's implementation of NotSkipping for
            // DelimiterMaster logic (skipping versions and special
            // handling of delete markers and PHDs)
            this.setKeyHandler(
                DelimiterFilterStateId.NotSkipping,
                this.keyHandler_NotSkippingPrefixNorVersionsV0.bind(this));

            // add extra state handlers specific to DelimiterMaster with v0 format
            this.setKeyHandler(
                DelimiterMasterFilterStateId.SkippingVersionsV0,
                this.keyHandler_SkippingVersionsV0.bind(this));

            this.setKeyHandler(
                DelimiterMasterFilterStateId.WaitVersionAfterPHDV0,
                this.keyHandler_WaitVersionAfterPHDV0.bind(this));

            this.setKeyHandler(
                DelimiterMasterFilterStateId.SkippingGapV0,
                this.keyHandler_SkippingGapV0.bind(this));

            if (this.marker) {
                // distinct initial state to include some special logic
                // before the first master key is found that does not have
                // to be checked afterwards
                this.state = <DelimiterMasterFilterState_SkippingVersionsV0> {
                    id: DelimiterMasterFilterStateId.SkippingVersionsV0,
                    masterKey: this.marker,
                };
            } else {
                this.state = <DelimiterFilterState_NotSkipping> {
                    id: DelimiterFilterStateId.NotSkipping,
                };
            }
        } else {
            // in v1, we can directly use Delimiter's implementation,
            // which is already set to the proper state

            // save the base implementation of the `NotSkipping` state in
            // Delimiter before overriding it with ours, to be able to call it from there
            this.keyHandler_NotSkipping_Delimiter = this.keyHandlers[DelimiterFilterStateId.NotSkipping];
            this.setKeyHandler(
                DelimiterFilterStateId.NotSkipping,
                this.keyHandler_NotSkippingPrefixNorVersionsV1.bind(this));
        }

        // default initialization of the gap cache and building states, can be
        // set by refreshGapCache()
        this._gapCaching = {
            state: GapCachingState.NoGapCache,
        };
        this._gapBuilding = {
            state: GapBuildingState.Disabled,
        };
        this._refreshedBuildingParams = null;
    }

    /**
     * Get the validity period left before a refresh of the gap cache is needed
     * to continue building new gaps.
     *
     * @return {number|null} one of:
     * - the remaining time in milliseconds in which gaps can be added to the
     *   cache before a call to refreshGapCache() is required
     * - or 0 if there is no time left and a call to refreshGapCache() is required
     *   to resume caching gaps
     * - or null if refreshing the cache is never needed (because the gap cache
     *   is either not available or not used)
     */
    getGapBuildingValidityPeriodMs(): number | null {
        let gapBuilding;
        switch (this._gapBuilding.state) {
        case GapBuildingState.Disabled:
            return null;
        case GapBuildingState.Expired:
            return 0;
        case GapBuildingState.NotBuilding:
            gapBuilding = <GapBuildingInfo_NotBuilding> this._gapBuilding;
            break;
        case GapBuildingState.Building:
            gapBuilding = <GapBuildingInfo_Building> this._gapBuilding;
            break;
        }
        const { gapCache, params } = gapBuilding;
        const elapsedTime = Date.now() - params.initTimestamp;
        return Math.max(gapCache.exposureDelayMs - elapsedTime, 0);
    }

    /**
     * Refresh the gaps caching logic (gaps are series of current delete markers
     * in V0 bucket metadata format). It has two effects:
     *
     * - starts exposing existing and future gaps from the cache to efficiently
     *   skip over series of current delete markers that have been seen and cached
     *   earlier
     *
     * - enables building and caching new gaps (or extending existing ones), for a
     *   limited time period defined by the `gapCacheProxy.exposureDelayMs` value
     *   in milliseconds. To refresh the validity period and resume building and
     *   caching new gaps, one must restart a new listing from the database (starting
     *   at the current listing key, included), then call refreshGapCache() again.
     *
     * @param {GapCacheInterface} gapCacheProxy - API proxy to the gaps cache
     * (the proxy should handle prefixing object keys with the bucket name)
     * @param {number} [minGapWeight=100] - minimum weight of a gap for it to be
     * added to the cache
     * @param {number} [triggerSaveGapWeight] - cumulative weight to wait for
     * before saving the current building gap. Cannot be greater than
     * `gapCacheProxy.maxGapWeight` (the value is thresholded to `maxGapWeight`
     * otherwise). Defaults to `gapCacheProxy.maxGapWeight / 2`.
     * @return {undefined}
     */
    refreshGapCache(
        gapCacheProxy: GapCacheInterface,
        minGapWeight?: number,
        triggerSaveGapWeight?: number
    ): void {
        if (this.vFormat !== BucketVersioningKeyFormat.v0) {
            return;
        }
        if (this._gapCaching.state === GapCachingState.NoGapCache) {
            this._gapCaching = {
                state: GapCachingState.UnknownGap,
                gapCache: gapCacheProxy,
            };
        }
        const refreshedBuildingParams: GapBuildingParams = {
            minGapWeight: minGapWeight || 100,
            triggerSaveGapWeight: triggerSaveGapWeight
                || Math.trunc(gapCacheProxy.maxGapWeight / 2),
            initTimestamp: Date.now(),
        };
        if (this._gapBuilding.state === GapBuildingState.Building) {
            // refreshed params will be applied as soon as the current building gap is saved
            this._refreshedBuildingParams = refreshedBuildingParams;
        } else {
            this._gapBuilding = {
                state: GapBuildingState.NotBuilding,
                gapCache: gapCacheProxy,
                params: refreshedBuildingParams,
            };
        }
    }
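    /* Hedged usage sketch (added note, not part of the original file): a
     * caller wires a gap cache into a v0 master listing roughly as follows,
     * where `gapCacheProxy` stands for any GapCacheInterface implementation
     * (an assumption of this sketch):
     *
     *   const listing = new DelimiterMaster(params, logger, 'v0');
     *   listing.refreshGapCache(gapCacheProxy, 100, 50);
     *   // ...feed entries through listing.filter()...
     *
     * Once getGapBuildingValidityPeriodMs() returns 0, new gaps can no longer
     * be cached: restart a listing from the current key and call
     * refreshGapCache() again to renew the exposure window. */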

    /**
     * Trigger a lookup of the closest upcoming or already skippable gap.
     *
     * @param {GapCachingInfo_NoCachedGap} gapCaching - current gap caching state
     * @param {string} fromKey - lookup a gap not before 'fromKey'
     * @return {undefined} - the lookup is asynchronous and its
     * response is handled inside this function
     */
    _triggerGapLookup(gapCaching: GapCachingInfo_NoCachedGap, fromKey: string): void {
        this._gapCaching = {
            state: GapCachingState.GapLookupInProgress,
            gapCache: gapCaching.gapCache,
        };
        const maxKey = this.prefix ? inc(this.prefix) : undefined;
        gapCaching.gapCache.lookupGap(fromKey, maxKey).then(_gap => {
            const gap = <GapSetEntry | null> _gap;
            if (gap) {
                this._gapCaching = {
                    state: GapCachingState.GapCached,
                    gapCache: gapCaching.gapCache,
                    gapCached: gap,
                };
            } else {
                this._gapCaching = {
                    state: GapCachingState.NoMoreGap,
                };
            }
        });
    }

    _checkGapOnMasterDeleteMarker(key: string): FilterReturnValue {
        switch (this._gapBuilding.state) {
        case GapBuildingState.Disabled:
        case GapBuildingState.Expired:
            break;
        case GapBuildingState.NotBuilding:
            this._createBuildingGap(key, 1);
            break;
        case GapBuildingState.Building:
            this._updateBuildingGap(key);
            break;
        }
        if (this._gapCaching.state === GapCachingState.GapCached) {
            const { gapCached } = this._gapCaching;
            if (key >= gapCached.firstKey) {
                if (key <= gapCached.lastKey) {
                    // we are inside the last looked up cached gap: transition to
                    // 'SkippingGapV0' state
                    this.setState(<DelimiterMasterFilterState_SkippingGapV0> {
                        id: DelimiterMasterFilterStateId.SkippingGapV0,
                    });
                    // cut the current gap before skipping, it will be merged or
                    // chained with the existing one (depending on its weight)
                    if (this._gapBuilding.state === GapBuildingState.Building) {
                        // subtract 1 from the weight because we are going to chain this gap,
                        // which has an overlap of one key.
                        this._gapBuilding.gap.weight -= 1;
                        this._cutBuildingGap();
                    }
                    return FILTER_SKIP;
                }
                // as we are past the cached gap, we will need another lookup
                this._gapCaching = {
                    state: GapCachingState.UnknownGap,
                    gapCache: this._gapCaching.gapCache,
                };
            }
        }
        if (this._gapCaching.state === GapCachingState.UnknownGap) {
            this._triggerGapLookup(this._gapCaching, key);
        }
        return FILTER_ACCEPT;
    }

    filter_onNewMasterKeyV0(key: string, value: string): FilterReturnValue {
        // if this master key is a delete marker, accept it without
        // adding the version to the contents
        if (Version.isDeleteMarker(value)) {
            // update the state to start skipping versions of the new master key
            this.setState(<DelimiterMasterFilterState_SkippingVersionsV0> {
                id: DelimiterMasterFilterStateId.SkippingVersionsV0,
                masterKey: key,
            });
            return this._checkGapOnMasterDeleteMarker(key);
        }
        if (Version.isPHD(value)) {
            // master version is a PHD version: wait for the first
            // following version that will be considered as the actual
            // master key
            this.setState(<DelimiterMasterFilterState_WaitVersionAfterPHDV0> {
                id: DelimiterMasterFilterStateId.WaitVersionAfterPHDV0,
                masterKey: key,
            });
            return FILTER_ACCEPT;
        }
        // cut the current gap as soon as a non-deleted entry is seen
        this._cutBuildingGap();

        if (key.startsWith(DbPrefixes.Replay)) {
            // skip internal replay prefix entirely
            this.setState(<DelimiterFilterState_SkippingPrefix> {
                id: DelimiterFilterStateId.SkippingPrefix,
                prefix: DbPrefixes.Replay,
            });
            return FILTER_SKIP;
        }
        if (this._reachedMaxKeys()) {
            return FILTER_END;
        }

        const commonPrefix = this.addCommonPrefixOrContents(key, value);
        if (commonPrefix) {
            // transition into SkippingPrefix state to skip all following keys
            // while they start with the same prefix
            this.setState(<DelimiterFilterState_SkippingPrefix> {
                id: DelimiterFilterStateId.SkippingPrefix,
                prefix: commonPrefix,
            });
            return FILTER_ACCEPT;
        }
        // update the state to start skipping versions of the new master key
        this.setState(<DelimiterMasterFilterState_SkippingVersionsV0> {
            id: DelimiterMasterFilterStateId.SkippingVersionsV0,
            masterKey: key,
        });
        return FILTER_ACCEPT;
    }

    keyHandler_NotSkippingPrefixNorVersionsV0(key: string, value: string): FilterReturnValue {
        return this.filter_onNewMasterKeyV0(key, value);
    }

    filter_onNewMasterKeyV1(key: string, value: string): FilterReturnValue {
        // if this master key is a delete marker, accept it without
        // adding the version to the contents
        if (Version.isDeleteMarker(value)) {
            return FILTER_ACCEPT;
        }
        // use base Delimiter's implementation
        return this.keyHandler_NotSkipping_Delimiter(key, value);
    }

    keyHandler_NotSkippingPrefixNorVersionsV1(key: string, value: string): FilterReturnValue {
        return this.filter_onNewMasterKeyV1(key, value);
    }

    keyHandler_SkippingVersionsV0(key: string, value: string): FilterReturnValue {
        /* In the SkippingVersionsV0 state, skip all version keys
         * (<key><versionIdSeparator><version>) */
        const versionIdIndex = key.indexOf(VID_SEP);
        if (versionIdIndex !== -1) {
            // version keys count in the building gap weight because they must
            // also be listed until skipped
            if (this._gapBuilding.state === GapBuildingState.Building) {
                this._updateBuildingGap(key);
            }
            return FILTER_SKIP;
        }
        return this.filter_onNewMasterKeyV0(key, value);
    }

    keyHandler_WaitVersionAfterPHDV0(key: string, value: string): FilterReturnValue {
        // After a PHD key is encountered, the next version key of the
        // same object, if it exists, is the new master key, hence
        // consider it as such and call 'onNewMasterKeyV0' (the test
        // 'masterKey === phdKey' is probably redundant when we already
        // know we have a versioned key, since all objects in v0 have
        // a master key, but keep it as a safety check)
        const { masterKey: phdKey } = <DelimiterMasterFilterState_WaitVersionAfterPHDV0> this.state;
        const versionIdIndex = key.indexOf(VID_SEP);
        if (versionIdIndex !== -1) {
            const masterKey = key.slice(0, versionIdIndex);
            if (masterKey === phdKey) {
                return this.filter_onNewMasterKeyV0(masterKey, value);
            }
        }
        return this.filter_onNewMasterKeyV0(key, value);
    }

    keyHandler_SkippingGapV0(key: string, value: string): FilterReturnValue {
        const { gapCache, gapCached } = <GapCachingInfo_GapCached> this._gapCaching;
        if (key <= gapCached.lastKey) {
            return FILTER_SKIP;
        }
        this._gapCaching = {
            state: GapCachingState.UnknownGap,
            gapCache,
        };
        this.setState(<DelimiterMasterFilterState_SkippingVersionsV0> {
            id: DelimiterMasterFilterStateId.SkippingVersionsV0,
        });
        // Start a gap with weight=0 from the latest skippable key. This
        // allows extending the gap just skipped with a chained gap in case
        // other delete markers are seen after the existing gap is skipped.
        this._createBuildingGap(gapCached.lastKey, 0, gapCached.weight);

        return this.handleKey(key, value);
    }

    skippingBase(): string | undefined {
        switch (this.state.id) {
        case DelimiterMasterFilterStateId.SkippingVersionsV0:
            const { masterKey } = <DelimiterMasterFilterState_SkippingVersionsV0> this.state;
            return masterKey + inc(VID_SEP);

        case DelimiterMasterFilterStateId.SkippingGapV0:
            const { gapCached } = <GapCachingInfo_GapCached> this._gapCaching;
            return gapCached.lastKey;

        default:
            return super.skippingBase();
        }
    }

    result(): ResultObject {
        this._cutBuildingGap();
        return super.result();
    }

    _checkRefreshedBuildingParams(params: GapBuildingParams): GapBuildingParams {
        if (this._refreshedBuildingParams) {
            const newParams = this._refreshedBuildingParams;
            this._refreshedBuildingParams = null;
            return newParams;
        }
        return params;
    }

    /**
     * Save the gap being built if allowed (i.e. still within the
     * allocated exposure time window).
     *
     * @return {boolean} - true if the gap was saved, false if we are
     * outside the allocated exposure time window.
     */
    _saveBuildingGap(): boolean {
        const { gapCache, params, gap, gapWeight } =
            <GapBuildingInfo_Building> this._gapBuilding;
        const totalElapsed = Date.now() - params.initTimestamp;
        if (totalElapsed >= gapCache.exposureDelayMs) {
            this._gapBuilding = {
                state: GapBuildingState.Expired,
            };
            this._refreshedBuildingParams = null;
            return false;
        }
        const { firstKey, lastKey, weight } = gap;
        gapCache.setGap(firstKey, lastKey, weight);
        this._gapBuilding = {
            state: GapBuildingState.Building,
            gapCache,
            params: this._checkRefreshedBuildingParams(params),
            gap: {
                firstKey: gap.lastKey,
                lastKey: gap.lastKey,
                weight: 0,
            },
            gapWeight,
        };
        return true;
    }

    /**
     * Create a new gap to be extended afterwards
     *
     * @param {string} newKey - gap's first key
     * @param {number} startWeight - initial weight of the building gap (usually 0 or 1)
     * @param {number} [cachedWeight] - if continuing a cached gap, weight of the existing
     * cached portion
     * @return {undefined}
     */
    _createBuildingGap(newKey: string, startWeight: number, cachedWeight?: number): void {
        if (this._gapBuilding.state === GapBuildingState.NotBuilding) {
            const { gapCache, params } = <GapBuildingInfo_NotBuilding> this._gapBuilding;
            this._gapBuilding = {
                state: GapBuildingState.Building,
                gapCache,
                params: this._checkRefreshedBuildingParams(params),
                gap: {
                    firstKey: newKey,
                    lastKey: newKey,
                    weight: startWeight,
                },
                gapWeight: (cachedWeight || 0) + startWeight,
            };
        }
    }

    _updateBuildingGap(newKey: string): void {
        const gapBuilding = <GapBuildingInfo_Building> this._gapBuilding;
        const { params, gap } = gapBuilding;
        gap.lastKey = newKey;
        gap.weight += 1;
        gapBuilding.gapWeight += 1;
        // the GapCache API requires updating a gap regularly because it can only split
        // it once per update, by the known last key. In practice the default behavior
        // is to trigger an update after a number of keys that is half the maximum weight.
        // It is also useful for other listings to benefit from the cache sooner.
        if (gapBuilding.gapWeight >= params.minGapWeight &&
            gap.weight >= params.triggerSaveGapWeight) {
            this._saveBuildingGap();
        }
    }

    _cutBuildingGap(): void {
        if (this._gapBuilding.state === GapBuildingState.Building) {
            let gapBuilding = <GapBuildingInfo_Building> this._gapBuilding;
            let { gapCache, params, gap, gapWeight } = gapBuilding;
            // only set gaps that are significant enough in weight and
            // with a non-empty extension
            if (gapWeight >= params.minGapWeight && gap.weight > 0) {
                // we're done if we were not allowed to save the gap
                if (!this._saveBuildingGap()) {
                    return;
                }
                // params may have been refreshed, reload them
                gapBuilding = <GapBuildingInfo_Building> this._gapBuilding;
                params = gapBuilding.params;
            }
            this._gapBuilding = {
                state: GapBuildingState.NotBuilding,
                gapCache,
                params,
            };
        }
    }
}
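An illustrative trace (added note, not part of the diff) of the v0 state machine above, where 'a' carries a current delete marker and 'b' a live object (version ids shortened):

//   key            state before             effect
//   'a'            NotSkipping           -> SkippingVersionsV0; may open a building gap
//   'a\u0000v1'    SkippingVersionsV0    -> FILTER_SKIP; building gap weight += 1
//   'b'            SkippingVersionsV0    -> gap cut, 'b' listed; stays SkippingVersionsV0
//   'b\u0000v2'    SkippingVersionsV0    -> FILTER_SKIP
//
// If a cached gap covering ['a', 'a\u0000v1'] had been looked up beforehand,
// the listing would enter SkippingGapV0 instead, and skipping() would jump
// straight past the gap's lastKey.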
@ -1,202 +0,0 @@
const { DelimiterVersions } = require('./delimiterVersions');
|
|
||||||
const { FILTER_END, FILTER_SKIP } = require('./tools');
|
|
||||||
|
|
||||||
const TRIM_METADATA_MIN_BLOB_SIZE = 10000;
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Handle object listing with parameters. This extends the base class DelimiterVersions
|
|
||||||
* to return the raw non-current versions objects.
|
|
||||||
*/
|
|
||||||
class DelimiterNonCurrent extends DelimiterVersions {
|
|
||||||
/**
|
|
||||||
* Delimiter listing of non-current versions.
|
|
||||||
* @param {Object} parameters - listing parameters
|
|
||||||
* @param {String} parameters.keyMarker - key marker
|
|
||||||
* @param {String} parameters.versionIdMarker - version id marker
|
|
||||||
* @param {String} parameters.beforeDate - limit the response to keys with stale date older than beforeDate.
|
|
||||||
* “stale date” is the date on when a version becomes non-current.
|
|
||||||
* @param {Number} parameters.maxScannedLifecycleListingEntries - max number of entries to be scanned
|
|
||||||
* @param {String} parameters.excludedDataStoreName - exclude dataStoreName matches from the versions
|
|
||||||
* @param {RequestLogger} logger - The logger of the request
|
|
||||||
* @param {String} [vFormat] - versioning key format
|
|
||||||
*/
|
|
||||||
constructor(parameters, logger, vFormat) {
|
|
||||||
super(parameters, logger, vFormat);
|
|
||||||
|
|
||||||
this.beforeDate = parameters.beforeDate;
|
|
||||||
this.excludedDataStoreName = parameters.excludedDataStoreName;
|
|
||||||
this.maxScannedLifecycleListingEntries = parameters.maxScannedLifecycleListingEntries;
|
|
||||||
|
|
||||||
// internal state
|
|
||||||
this.prevKey = null;
|
|
||||||
this.staleDate = null;
|
|
||||||
|
|
||||||
this.scannedKeys = 0;
|
|
||||||
}
|
|
||||||
|
|
||||||
getLastModified(value) {
|
|
||||||
let lastModified;
|
|
||||||
try {
|
|
||||||
const v = JSON.parse(value);
|
|
||||||
lastModified = v['last-modified'];
|
|
||||||
} catch (e) {
|
|
||||||
this.logger.warn('could not parse Object Metadata while listing',
|
|
||||||
{
|
|
||||||
method: 'getLastModified',
|
|
||||||
err: e.toString(),
|
|
||||||
});
|
|
||||||
}
|
|
||||||
return lastModified;
|
|
||||||
}
|
|
||||||
|
|
||||||
// Overwrite keyHandler_SkippingVersions to include the last version from the previous listing.
|
|
||||||
// The creation (last-modified) date of this version will be the stale date for the following version.
|
|
||||||
// eslint-disable-next-line camelcase
|
|
||||||
keyHandler_SkippingVersions(key, versionId, value) {
|
|
||||||
if (key === this.keyMarker) {
|
|
||||||
// since the nonversioned key equals the marker, there is
|
|
||||||
// necessarily a versionId in this key
|
|
||||||
const _versionId = versionId;
|
|
||||||
if (_versionId < this.versionIdMarker) {
|
|
||||||
// skip all versions until marker
|
|
||||||
return FILTER_SKIP;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
this.setState({
|
|
||||||
id: 1 /* NotSkipping */,
|
|
||||||
});
|
|
||||||
return this.handleKey(key, versionId, value);
|
|
||||||
}
|
|
||||||
|
|
||||||
filter(obj) {
|
|
||||||
if (this.maxScannedLifecycleListingEntries && this.scannedKeys >= this.maxScannedLifecycleListingEntries) {
|
|
||||||
this.IsTruncated = true;
|
|
||||||
this.logger.info('listing stopped due to reaching the maximum scanned entries limit',
|
|
||||||
{
|
|
||||||
maxScannedLifecycleListingEntries: this.maxScannedLifecycleListingEntries,
|
|
||||||
scannedKeys: this.scannedKeys,
|
|
||||||
});
|
|
||||||
return FILTER_END;
|
|
||||||
}
|
|
||||||
++this.scannedKeys;
|
|
||||||
return super.filter(obj);
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* NOTE: Each version of a specific key is sorted from the latest to the oldest
|
|
||||||
* thanks to the way version ids are generated.
|
|
||||||
* DESCRIPTION: Skip the version if it represents the master key, but keep its last-modified date in memory,
|
|
||||||
* which will be the stale date of the following version.
|
|
||||||
* The following version is pushed only:
|
|
||||||
* - if the "stale date" (picked up from the previous version) is available (JSON.parse has not failed),
|
|
||||||
* - if "beforeDate" is not specified or if specified and the "stale date" is older.
|
|
||||||
* - if "excludedDataStoreName" is not specified or if specified and the data store name is different
|
|
||||||
* The in-memory "stale date" is then updated with the version's last-modified date to be used for
|
|
||||||
* the following version.
|
|
||||||
* The process stops and returns the available results if either:
|
|
||||||
* - no more metadata key is left to be processed
|
|
||||||
* - the listing reaches the maximum number of key to be returned
|
|
||||||
* - the internal timeout is reached
|
|
||||||
* @param {String} key - The key to add
|
|
||||||
* @param {String} versionId - The version id
|
|
||||||
* @param {String} value - The value of the key
|
|
||||||
* @return {undefined}
|
|
||||||
*/
|
|
||||||
addVersion(key, versionId, value) {
|
|
||||||
this.nextKeyMarker = key;
|
|
||||||
this.nextVersionIdMarker = versionId;
|
|
||||||
|
|
||||||
// Skip the version if it represents the non-current version, but keep its last-modified date,
|
|
||||||
// which will be the stale date of the following version.
|
|
||||||
const isCurrentVersion = key !== this.prevKey;
|
|
||||||
if (isCurrentVersion) {
|
|
||||||
this.staleDate = this.getLastModified(value);
|
|
||||||
this.prevKey = key;
|
|
||||||
return;
|
|
||||||
}
|
|
||||||
|
|
||||||
// The following version is pushed only:
|
|
||||||
// - if the "stale date" (picked up from the previous version) is available (JSON.parse has not failed),
|
|
||||||
// - if "beforeDate" is not specified or if specified and the "stale date" is older.
|
|
||||||
// - if "excludedDataStoreName" is not specified or if specified and the data store name is different
|
|
||||||
let lastModified;
|
|
||||||
if (this.staleDate && (!this.beforeDate || this.staleDate < this.beforeDate)) {
|
|
||||||
const parsedValue = this._parse(value);
|
|
||||||
// if parsing fails, skip the key.
|
|
||||||
if (parsedValue) {
|
|
||||||
const dataStoreName = parsedValue.dataStoreName;
|
|
||||||
lastModified = parsedValue['last-modified'];
|
|
||||||
if (!this.excludedDataStoreName || dataStoreName !== this.excludedDataStoreName) {
|
|
||||||
const s = this._stringify(parsedValue, this.staleDate);
|
|
||||||
// check that _stringify succeeds to only push objects with a defined staleDate.
|
|
||||||
if (s) {
|
|
||||||
this.Versions.push({ key, value: s });
|
|
||||||
++this.keys;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
// The in-memory "stale date" is then updated with the version's last-modified date to be used for
|
|
||||||
// the following version.
|
|
||||||
this.staleDate = lastModified || this.getLastModified(value);
|
|
||||||
|
|
||||||
return;
|
|
||||||
}
|
|
||||||
|
|
||||||
    /**
     * Parses the stringified entry's value and removes the location property if it is too large.
     * @param {string} s - stringified value
     * @return {object} p - undefined if parsing fails, otherwise it contains the parsed value.
     */
    _parse(s) {
        let p;
        try {
            p = JSON.parse(s);
            if (s.length >= TRIM_METADATA_MIN_BLOB_SIZE) {
                delete p.location;
            }
        } catch (e) {
            this.logger.warn('Could not parse Object Metadata while listing', {
                method: 'DelimiterNonCurrent._parse',
                err: e.toString(),
            });
        }
        return p;
    }

    _stringify(parsedMD, staleDate) {
        const p = parsedMD;
        let s = undefined;
        p.staleDate = staleDate;
        try {
            s = JSON.stringify(p);
        } catch (e) {
            this.logger.warn('could not stringify Object Metadata while listing', {
                method: 'DelimiterNonCurrent._stringify',
                err: e.toString(),
            });
        }
        return s;
    }

    result() {
        const { Versions, IsTruncated, NextKeyMarker, NextVersionIdMarker } = super.result();

        const result = {
            Contents: Versions,
            IsTruncated,
        };

        if (NextKeyMarker) {
            result.NextKeyMarker = NextKeyMarker;
        }

        if (NextVersionIdMarker) {
            result.NextVersionIdMarker = NextVersionIdMarker;
        }

        return result;
    }
}

module.exports = { DelimiterNonCurrent };

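Note: the stale-date chaining implemented by addVersion() above can be illustrated with a small standalone TypeScript sketch (not part of the diff; the entry shape and dates are hypothetical):

type Entry = { key: string; lastModified: string };

// Versions arrive sorted newest to oldest; the current version of each key
// is skipped, but its last-modified date becomes the stale date of the
// next (non-current) version of the same key.
function nonCurrentWithStaleDate(entries: Entry[]): { key: string; staleDate: string }[] {
    const out: { key: string; staleDate: string }[] = [];
    let prevKey: string | null = null;
    let staleDate: string | undefined;
    for (const entry of entries) {
        if (entry.key !== prevKey) {
            prevKey = entry.key;
            staleDate = entry.lastModified;
            continue;
        }
        if (staleDate) {
            out.push({ key: entry.key, staleDate });
        }
        staleDate = entry.lastModified;
    }
    return out;
}

// Three versions of 'obj', newest first: the two non-current versions are
// returned with stale dates 2024-03-03 and 2024-02-02 respectively.
console.log(nonCurrentWithStaleDate([
    { key: 'obj', lastModified: '2024-03-03T00:00:00Z' },
    { key: 'obj', lastModified: '2024-02-02T00:00:00Z' },
    { key: 'obj', lastModified: '2024-01-01T00:00:00Z' },
]));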
@ -1,204 +0,0 @@
const DelimiterVersions = require('./delimiterVersions').DelimiterVersions;
const { FILTER_END } = require('./tools');
const TRIM_METADATA_MIN_BLOB_SIZE = 10000;

/**
 * Handle object listing with parameters. This extends the base class DelimiterVersions
 * to return the orphan delete markers. Orphan delete markers are also
 * referred to as expired object delete markers.
 * They are delete markers with zero noncurrent versions.
 */
class DelimiterOrphanDeleteMarker extends DelimiterVersions {
    /**
     * Delimiter listing of orphan delete markers.
     * @param {Object} parameters - listing parameters
     * @param {String} parameters.beforeDate - limit the response to keys older than beforeDate
     * @param {Number} parameters.maxScannedLifecycleListingEntries - max number of entries to be scanned
     * @param {RequestLogger} logger - The logger of the request
     * @param {String} [vFormat] - versioning key format
     */
    constructor(parameters, logger, vFormat) {
        const {
            marker,
            maxKeys,
            prefix,
            beforeDate,
            maxScannedLifecycleListingEntries,
        } = parameters;

        const versionParams = {
            // The orphan delete marker logic uses the term 'marker' instead of 'keyMarker',
            // as the latter could suggest the presence of a 'versionIdMarker'.
            keyMarker: marker,
            maxKeys,
            prefix,
        };
        super(versionParams, logger, vFormat);

        this.maxScannedLifecycleListingEntries = maxScannedLifecycleListingEntries;
        this.beforeDate = beforeDate;
        // this.prevKeyName is used as a marker for the next listing when the current one reaches its entry limit.
        // We cannot rely on this.keyName, as it contains the name of the current key.
        // In the event of a listing interruption due to reaching the maximum scanned entries,
        // relying on this.keyName would cause the next listing to skip the current key because S3 starts
        // listing after the marker.
        this.prevKeyName = null;
        this.keyName = null;
        this.value = null;
        this.scannedKeys = 0;
    }

    _reachedMaxKeys() {
        if (this.keys >= this.maxKeys) {
            return true;
        }
        return false;
    }

    _addOrphan() {
        const parsedValue = this._parse(this.value);
        // if parsing fails, skip the key.
        if (parsedValue) {
            const lastModified = parsedValue['last-modified'];
            const isDeleteMarker = parsedValue.isDeleteMarker;
            // We then check if the orphan version is a delete marker and if it is older than the "beforeDate".
            if ((!this.beforeDate || (lastModified && lastModified < this.beforeDate)) && isDeleteMarker) {
                // Prefer returning untrimmed data rather than stopping the service in case of stringify failure.
                const s = this._stringify(parsedValue) || this.value;
                this.Versions.push({ key: this.keyName, value: s });
                this.nextKeyMarker = this.keyName;
                ++this.keys;
            }
        }
    }

    /**
     * Parses the stringified entry's value and removes the location property if it is too large.
     * @param {string} s - stringified value
     * @return {object} p - undefined if parsing fails, otherwise it contains the parsed value.
     */
    _parse(s) {
        let p;
        try {
            p = JSON.parse(s);
            if (s.length >= TRIM_METADATA_MIN_BLOB_SIZE) {
                delete p.location;
            }
        } catch (e) {
            this.logger.warn('Could not parse Object Metadata while listing', {
                method: 'DelimiterOrphanDeleteMarker._parse',
                err: e.toString(),
            });
        }
        return p;
    }

    _stringify(value) {
        const p = value;
        let s = undefined;
        try {
            s = JSON.stringify(p);
        } catch (e) {
            this.logger.warn('could not stringify Object Metadata while listing', {
                method: 'DelimiterOrphanDeleteMarker._stringify',
                err: e.toString(),
            });
        }
        return s;
    }

    /**
     * The purpose of _isMaxScannedEntriesReached is to restrict the number of scanned entries,
     * thus controlling resource overhead (CPU...).
     * @return {boolean} isMaxScannedEntriesReached - true if the maximum limit on the number
     * of entries scanned has been reached, false otherwise.
     */
    _isMaxScannedEntriesReached() {
        return this.maxScannedLifecycleListingEntries && this.scannedKeys >= this.maxScannedLifecycleListingEntries;
    }

    filter(obj) {
        if (this._isMaxScannedEntriesReached()) {
            this.nextKeyMarker = this.prevKeyName;
            this.IsTruncated = true;
            this.logger.info('listing stopped due to reaching the maximum scanned entries limit', {
                maxScannedLifecycleListingEntries: this.maxScannedLifecycleListingEntries,
                scannedKeys: this.scannedKeys,
            });
            return FILTER_END;
        }
        ++this.scannedKeys;
        return super.filter(obj);
    }

    /**
     * NOTE: Each version of a specific key is sorted from the latest to the oldest
     * thanks to the way version ids are generated.
     * DESCRIPTION: For a given key, the latest version is kept in memory since it is the current version.
     * If the following version references a new key, it means that the previous one was an orphan version.
     * We then check if the orphan version is a delete marker and if it is older than the "beforeDate".
     * The process stops and returns the available results if either:
     * - no more metadata keys are left to be processed,
     * - the listing reaches the maximum number of keys to be returned,
     * - the internal timeout is reached.
     * NOTE: we cannot leverage MongoDB to list keys older than "beforeDate"
     * because we would then be unable to assess whether the key is an orphan.
     * @param {String} key - The object key.
     * @param {String} versionId - The object version id.
     * @param {String} value - The value of the key
     * @return {undefined}
     */
    addVersion(key, versionId, value) {
        // For a given key, the youngest version is kept in memory since it represents the current version.
        if (key !== this.keyName) {
            // If this.value is defined, it means that the <this.keyName, this.value> pair is "allowed" to be an orphan.
            if (this.value) {
                this._addOrphan();
            }
            this.prevKeyName = this.keyName;
            this.keyName = key;
            this.value = value;

            return;
        }

        // If the key is not the current version, we can skip it in the next listing
        // in the case where the current listing is interrupted due to reaching the maximum scanned entries.
        this.prevKeyName = key;
        this.keyName = key;
        this.value = null;

        return;
    }

    result() {
        // Only check for a remaining last orphan delete marker if the listing was not interrupted.
        // This will help avoid false positives.
        if (!this._isMaxScannedEntriesReached()) {
            // The following check makes sure the last orphan delete marker is not forgotten.
            if (this.keys < this.maxKeys) {
                if (this.value) {
                    this._addOrphan();
                }
            // The following makes sure that if maxKeys is reached, IsTruncated is set to true.
            // We moved the "IsTruncated" handling out of _reachedMaxKeys to make sure we take into account the last entity
            // if the listing is truncated right before the last entity and the last entity is an orphan delete marker.
            } else {
                this.IsTruncated = this.maxKeys > 0;
            }
        }

        const result = {
            Contents: this.Versions,
            IsTruncated: this.IsTruncated,
        };

        if (this.IsTruncated) {
            result.NextMarker = this.nextKeyMarker;
        }

        return result;
    }
}

module.exports = { DelimiterOrphanDeleteMarker };

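Note: the key-change trick used by addVersion() and result() above (flush the remembered candidate when a new key shows up, and once more at the end) can be sketched in standalone TypeScript; the entry shape is hypothetical and the beforeDate and metadata-trimming logic is left out:

type VersionEntry = { key: string; isDeleteMarker: boolean };

// Entries arrive sorted by key, newest version first; a key whose only
// listed version is a delete marker is an orphan delete marker.
function findOrphanDeleteMarkers(entries: VersionEntry[]): string[] {
    const orphans: string[] = [];
    let candidate: VersionEntry | null = null;
    for (const entry of entries) {
        if (!candidate || entry.key !== candidate.key) {
            // key changed: the previous candidate had no noncurrent version
            if (candidate && candidate.isDeleteMarker) {
                orphans.push(candidate.key);
            }
            candidate = entry;
        } else {
            // a second version of the same key: disqualify the candidate
            candidate = { key: entry.key, isDeleteMarker: false };
        }
    }
    // do not forget the last candidate, as result() does above
    if (candidate && candidate.isDeleteMarker) {
        orphans.push(candidate.key);
    }
    return orphans;
}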
@ -1,12 +1,6 @@
 'use strict'; // eslint-disable-line strict

-const Extension = require('./Extension').default;
+const Delimiter = require('./delimiter').Delimiter;
-
-import {
-    FilterState,
-    FilterReturnValue,
-} from './delimiter';
-
 const Version = require('../../versioning/Version').Version;
 const VSConst = require('../../versioning/constants').VersioningConstants;
 const { inc, FILTER_END, FILTER_ACCEPT, FILTER_SKIP, SKIP_NONE } =
@ -15,10 +9,24 @@ const { inc, FILTER_END, FILTER_ACCEPT, FILTER_SKIP, SKIP_NONE } =
 const VID_SEP = VSConst.VersionId.Separator;
 const { DbPrefixes, BucketVersioningKeyFormat } = VSConst;

+// TODO: when S3C-4682 code is back, cleanup fields, methods and types
+// already present in Delimiter class
+
+export interface FilterState {
+    id: number,
+};
+
+export interface FilterReturnValue {
+    FILTER_ACCEPT,
+    FILTER_SKIP,
+    FILTER_END,
+};
+
 export const enum DelimiterVersionsFilterStateId {
     NotSkipping = 1,
     SkippingPrefix = 2,
-    SkippingVersions = 3,
+    WaitForNullKey = 3,
+    SkippingVersions = 4,
 };

 export interface DelimiterVersionsFilterState_NotSkipping extends FilterState {
@ -30,12 +38,16 @@ export interface DelimiterVersionsFilterState_SkippingPrefix extends FilterState
     prefix: string;
 };

+export interface DelimiterVersionsFilterState_WaitForNullKey extends FilterState {
+    id: DelimiterVersionsFilterStateId.WaitForNullKey,
+};
+
 export interface DelimiterVersionsFilterState_SkippingVersions extends FilterState {
     id: DelimiterVersionsFilterStateId.SkippingVersions,
     gt: string;
 };

-type KeyHandler = (key: string, versionId: string | undefined, value: string) => FilterReturnValue;
+type KeyHandler = (key: string, value: string) => FilterReturnValue;

 type ResultObject = {
     CommonPrefixes: string[],
@ -68,17 +80,13 @@ type GenMDParamsItem = {
  * @prop {String|undefined} prefix - prefix per amazon format
  * @prop {Number} maxKeys - number of keys to list
  */
-export class DelimiterVersions extends Extension {
+export class DelimiterVersions extends Delimiter {

     state: FilterState;
     keyHandlers: { [id: number]: KeyHandler };

     constructor(parameters, logger, vFormat) {
-        super(parameters, logger);
-        // original listing parameters
-        this.delimiter = parameters.delimiter;
-        this.prefix = parameters.prefix;
-        this.maxKeys = parameters.maxKeys || 1000;
+        super(parameters, logger, vFormat);
         // specific to version listing
         this.keyMarker = parameters.keyMarker;
         this.versionIdMarker = parameters.versionIdMarker;
@ -86,11 +94,7 @@ export class DelimiterVersions extends Extension {
         this.masterKey = undefined;
         this.masterVersionId = undefined;
         this.nullKey = null;
-        this.vFormat = vFormat || BucketVersioningKeyFormat.v0;
         // listing results
-        this.CommonPrefixes = [];
-        this.Versions = [];
-        this.IsTruncated = false;
         this.nextKeyMarker = parameters.keyMarker;
         this.nextVersionIdMarker = undefined;

@ -122,14 +126,17 @@ export class DelimiterVersions extends Extension {
             DelimiterVersionsFilterStateId.SkippingPrefix,
             this.keyHandler_SkippingPrefix.bind(this));

+        this.setKeyHandler(
+            DelimiterVersionsFilterStateId.WaitForNullKey,
+            this.keyHandler_WaitForNullKey.bind(this));
+
         this.setKeyHandler(
             DelimiterVersionsFilterStateId.SkippingVersions,
             this.keyHandler_SkippingVersions.bind(this));

         if (this.versionIdMarker) {
-            this.state = <DelimiterVersionsFilterState_SkippingVersions> {
-                id: DelimiterVersionsFilterStateId.SkippingVersions,
-                gt: `${this.keyMarker}${VID_SEP}${this.versionIdMarker}`,
+            this.state = <DelimiterVersionsFilterState_WaitForNullKey> {
+                id: DelimiterVersionsFilterStateId.WaitForNullKey,
             };
         } else {
             this.state = <DelimiterVersionsFilterState_NotSkipping> {
@ -194,20 +201,6 @@ export class DelimiterVersions extends Extension {
         return [mParams, vParams];
     }

-    /**
-     * check if the max keys count has been reached and set the
-     * final state of the result if it is the case
-     * @return {Boolean} - indicates if the iteration has to stop
-     */
-    _reachedMaxKeys(): boolean {
-        if (this.keys >= this.maxKeys) {
-            // In cases of maxKeys <= 0 -> IsTruncated = false
-            this.IsTruncated = this.maxKeys > 0;
-            return true;
-        }
-        return false;
-    }
-
     /**
      * Used to synchronize listing of M and V prefixes by object key
      *
@ -243,30 +236,6 @@ export class DelimiterVersions extends Extension {
         return { key: nonversionedKey, versionId };
     }

-    /**
-     * Include a key in the listing output, in the Versions or CommonPrefix result
-     *
-     * @param {string} key - key (without version ID)
-     * @param {string} versionId - version ID
-     * @param {string} value - metadata value
-     * @return {undefined}
-     */
-    addKey(key: string, versionId: string, value: string) {
-        // add the subprefix to the common prefixes if the key has the delimiter
-        const commonPrefix = this.getCommonPrefix(key);
-        if (commonPrefix) {
-            this.addCommonPrefix(commonPrefix);
-            // transition into SkippingPrefix state to skip all following keys
-            // while they start with the same prefix
-            this.setState(<DelimiterVersionsFilterState_SkippingPrefix> {
-                id: DelimiterVersionsFilterStateId.SkippingPrefix,
-                prefix: commonPrefix,
-            });
-        } else {
-            this.addVersion(key, versionId, value);
-        }
-    }
-
     /**
      * Add a (key, versionId, value) tuple to the listing.
      * Set the NextMarker to the current key
@ -276,8 +245,8 @@ export class DelimiterVersions extends Extension {
      * @param {String} value - The value of the key
      * @return {undefined}
      */
-    addVersion(key: string, versionId: string, value: string) {
-        this.Versions.push({
+    addContents(key: string, versionId: string, value: string) {
+        this.Contents.push({
             key,
             versionId,
             value: this.trimMetadata(value),
@ -288,9 +257,6 @@ export class DelimiterVersions extends Extension {
     }

     getCommonPrefix(key: string): string | undefined {
-        if (!this.delimiter) {
-            return undefined;
-        }
         const baseIndex = this.prefix ? this.prefix.length : 0;
         const delimiterIndex = key.indexOf(this.delimiter, baseIndex);
         if (delimiterIndex === -1) {
@ -309,7 +275,6 @@ export class DelimiterVersions extends Extension {
         this.CommonPrefixes.push(commonPrefix);
         ++this.keys;
         this.nextKeyMarker = commonPrefix;
-        this.nextVersionIdMarker = undefined;
     }

     /**
@ -325,6 +290,21 @@ export class DelimiterVersions extends Extension {
         this.nullKey = { key, versionId, value };
     }

+    /**
+     * Add the cached null key to the results. This is called when
+     * reaching the correct position for the null key in the output.
+     *
+     * @return {undefined}
+     */
+    addCurrentNullKey(): void {
+        this.addContents(
+            this.nullKey.key,
+            this.nullKey.versionId,
+            this.nullKey.value,
+        );
+        this.nullKey = null;
+    }
+
 getObjectKeyV0(obj: { key: string }): string {
         return obj.key;
     }
@ -348,24 +328,7 @@ export class DelimiterVersions extends Extension {
         const key = this.getObjectKey(obj);
         const value = obj.value;

-        const { key: nonversionedKey, versionId: keyVersionId } = this.parseKey(key);
-        if (this.nullKey) {
-            if (this.nullKey.key !== nonversionedKey
-                || this.nullKey.versionId < <string> keyVersionId) {
-                this.handleKey(
-                    this.nullKey.key, this.nullKey.versionId, this.nullKey.value);
-                this.nullKey = null;
-            }
-        }
-        if (keyVersionId === '') {
-            // null key
-            this.cacheNullKey(nonversionedKey, Version.from(value).getVersionId(), value);
-            if (this.state.id === DelimiterVersionsFilterStateId.SkippingVersions) {
-                return FILTER_SKIP;
-            }
-            return FILTER_ACCEPT;
-        }
-        return this.handleKey(nonversionedKey, keyVersionId, value);
+        return this.handleKey(key, value);
     }

     setState(state: FilterState): void {
@ -376,11 +339,11 @@ export class DelimiterVersions extends Extension {
         this.keyHandlers[stateId] = keyHandler;
     }

-    handleKey(key: string, versionId: string | undefined, value: string): FilterReturnValue {
-        return this.keyHandlers[this.state.id](key, versionId, value);
+    handleKey(key: string, value: string): FilterReturnValue {
+        return this.keyHandlers[this.state.id](key, value);
     }

-    keyHandler_NotSkippingV0(key: string, versionId: string | undefined, value: string): FilterReturnValue {
+    keyHandler_NotSkippingV0(key: string, value: string): FilterReturnValue {
         if (key.startsWith(DbPrefixes.Replay)) {
             // skip internal replay prefix entirely
             this.setState(<DelimiterVersionsFilterState_SkippingPrefix> {
@ -392,37 +355,63 @@ export class DelimiterVersions extends Extension {
         if (Version.isPHD(value)) {
             return FILTER_ACCEPT;
         }
-        return this.filter_onNewKey(key, versionId, value);
+        return this.filter_onNewKey(key, value);
     }

-    keyHandler_NotSkippingV1(key: string, versionId: string | undefined, value: string): FilterReturnValue {
-        // NOTE: this check on PHD is only useful for Artesca, S3C
-        // does not use PHDs in V1 format
-        if (Version.isPHD(value)) {
-            return FILTER_ACCEPT;
-        }
-        return this.filter_onNewKey(key, versionId, value);
+    keyHandler_NotSkippingV1(key: string, value: string): FilterReturnValue {
+        return this.filter_onNewKey(key, value);
     }

-    filter_onNewKey(key: string, versionId: string | undefined, value: string): FilterReturnValue {
+    filter_onNewKey(key: string, value: string): FilterReturnValue {
         if (this._reachedMaxKeys()) {
             return FILTER_END;
         }
-        if (versionId === undefined) {
+        const { key: nonversionedKey, versionId: keyVersionId } = this.parseKey(key);
+        if (this.nullKey &&
+            (this.nullKey.key !== nonversionedKey
+             || this.nullKey.versionId < <string> keyVersionId)) {
+            this.addCurrentNullKey();
+            if (this._reachedMaxKeys()) {
+                // IsTruncated: true is set, which is wanted because
+                // there is at least one more key to output: the one
+                // being processed here
+                return FILTER_END;
+            }
+        }
+        let versionId: string;
+        if (keyVersionId === undefined) {
             this.masterKey = key;
             this.masterVersionId = Version.from(value).getVersionId() || 'null';
-            this.addKey(this.masterKey, this.masterVersionId, value);
+            versionId = this.masterVersionId;
         } else {
-            if (this.masterKey === key && this.masterVersionId === versionId) {
+            if (keyVersionId === '') {
+                // null key
+                this.cacheNullKey(nonversionedKey, Version.from(value).getVersionId(), value);
+                return FILTER_ACCEPT;
+            }
+            if (this.masterKey === nonversionedKey && this.masterVersionId === keyVersionId) {
                 // do not add a version key if it is the master version
                 return FILTER_ACCEPT;
             }
-            this.addKey(key, versionId, value);
+            versionId = keyVersionId;
         }
+        // add the subprefix to the common prefixes if the key has the delimiter
+        const commonPrefix = this.getCommonPrefix(nonversionedKey);
+        if (commonPrefix) {
+            this.addCommonPrefix(commonPrefix);
+            // transition into SkippingPrefix state to skip all following keys
+            // while they start with the same prefix
+            this.setState(<DelimiterVersionsFilterState_SkippingPrefix> {
+                id: DelimiterVersionsFilterStateId.SkippingPrefix,
+                prefix: commonPrefix,
+            });
+            return FILTER_ACCEPT;
+        }
+        this.addContents(nonversionedKey, versionId, value);
         return FILTER_ACCEPT;
     }

-    keyHandler_SkippingPrefix(key: string, versionId: string | undefined, value: string): FilterReturnValue {
+    keyHandler_SkippingPrefix(key: string, value: string): FilterReturnValue {
         const { prefix } = <DelimiterVersionsFilterState_SkippingPrefix> this.state;
         if (key.startsWith(prefix)) {
             return FILTER_SKIP;
@ -430,11 +419,39 @@ export class DelimiterVersions extends Extension {
         this.setState(<DelimiterVersionsFilterState_NotSkipping> {
             id: DelimiterVersionsFilterStateId.NotSkipping,
         });
-        return this.handleKey(key, versionId, value);
+        return this.handleKey(key, value);
     }

-    keyHandler_SkippingVersions(key: string, versionId: string | undefined, value: string): FilterReturnValue {
-        if (key === this.keyMarker) {
+    keyHandler_WaitForNullKey(key: string, value: string): FilterReturnValue {
+        const { key: nonversionedKey, versionId } = this.parseKey(key);
+        if (nonversionedKey !== this.keyMarker) {
+            this.setState(<DelimiterVersionsFilterState_NotSkipping> {
+                id: DelimiterVersionsFilterStateId.NotSkipping,
+            });
+            return this.handleKey(key, value);
+        }
+        // we may now skip versions until VersionIdMarker
+        this.setState(<DelimiterVersionsFilterState_SkippingVersions> {
+            id: DelimiterVersionsFilterStateId.SkippingVersions,
+            gt: `${this.keyMarker}${VID_SEP}${this.versionIdMarker}`,
+        });
+
+        if (versionId === '') {
+            // only cache the null key if its version is older than
+            // the current version ID marker, otherwise it has already
+            // been output in a previous listing output
+            const nullVersionId = Version.from(value).getVersionId();
+            if (nullVersionId > this.versionIdMarker) {
+                this.cacheNullKey(nonversionedKey, nullVersionId, value);
+            }
+            return FILTER_SKIP;
+        }
+        return this.handleKey(key, value);
+    }
+
+    keyHandler_SkippingVersions(key: string, value: string): FilterReturnValue {
+        const { key: nonversionedKey, versionId } = this.parseKey(key);
+        if (nonversionedKey === this.keyMarker) {
             // since the nonversioned key equals the marker, there is
             // necessarily a versionId in this key
             const _versionId = <string> versionId;
@ -450,21 +467,18 @@ export class DelimiterVersions extends Extension {
         this.setState(<DelimiterVersionsFilterState_NotSkipping> {
             id: DelimiterVersionsFilterStateId.NotSkipping,
         });
-        return this.handleKey(key, versionId, value);
+        return this.handleKey(key, value);
     }

-    skippingBase(): string | undefined {
+    skippingBase() {
         switch (this.state.id) {
         case DelimiterVersionsFilterStateId.SkippingPrefix:
             const { prefix } = <DelimiterVersionsFilterState_SkippingPrefix> this.state;
-            return inc(prefix);
+            return prefix;

         case DelimiterVersionsFilterStateId.SkippingVersions:
             const { gt } = <DelimiterVersionsFilterState_SkippingVersions> this.state;
-            // the contract of skipping() is to return the first key
-            // that can be skipped to, so adding a null byte to skip
-            // over the existing versioned key set in 'gt'
-            return `${gt}\0`;
+            return gt;

         default:
             return SKIP_NONE;
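Note on the skippingBase() change above: on the development side, skipping() already returns the first key that can be skipped to (inc(prefix), or gt with a NUL byte appended), while on the hotfix side it returns the raw prefix or gt and leaves the increment to Skip._inc(). A standalone TypeScript sketch (hypothetical key names) of why appending a NUL byte yields the smallest key strictly greater than gt:

const VID_SEP = '\u0000';                 // separator used by v0 versioned keys
const gt = `obj${VID_SEP}0123`;           // last versioned key already emitted
const next = `${gt}\u0000`;               // smallest string strictly greater than gt

console.log(next > gt);                   // true
console.log(`obj${VID_SEP}01231` > next); // true: later version keys still sort after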
@ -511,12 +525,12 @@ export class DelimiterVersions extends Extension {
         // does not fit, so we know the result is now truncated
         // because there remains the null key to be output.
         //
-        if (this.nullKey) {
-            this.handleKey(this.nullKey.key, this.nullKey.versionId, this.nullKey.value);
+        if (this.nullKey && !this._reachedMaxKeys()) {
+            this.addCurrentNullKey();
         }
         const result: ResultObject = {
             CommonPrefixes: this.CommonPrefixes,
-            Versions: this.Versions,
+            Versions: this.Contents,
             IsTruncated: this.IsTruncated,
         };
         if (this.delimiter) {
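Note: both sides of the diff above rely on parseKey() splitting a raw database key into the object name and an optional version id, with an empty version id denoting the null key. A standalone TypeScript sketch of that convention (hypothetical key names; the real parseKey lives in this class):

const VID_SEP = '\u0000';

function parseKey(fullKey: string): { key: string, versionId?: string } {
    const idx = fullKey.indexOf(VID_SEP);
    if (idx === -1) {
        return { key: fullKey };                  // master key
    }
    return {
        key: fullKey.slice(0, idx),
        versionId: fullKey.slice(idx + 1),        // '' means the null key
    };
}

console.log(parseKey('photo.jpg'));               // { key: 'photo.jpg' }
console.log(parseKey(`photo.jpg${VID_SEP}9876`)); // versioned key
console.log(parseKey(`photo.jpg${VID_SEP}`));     // null key: versionId === ''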
@ -6,7 +6,4 @@ module.exports = {
     DelimiterMaster: require('./delimiterMaster')
         .DelimiterMaster,
     MPU: require('./MPU').MultipartUploads,
-    DelimiterCurrent: require('./delimiterCurrent').DelimiterCurrent,
-    DelimiterNonCurrent: require('./delimiterNonCurrent').DelimiterNonCurrent,
-    DelimiterOrphanDeleteMarker: require('./delimiterOrphanDeleteMarker').DelimiterOrphanDeleteMarker,
 };
@ -52,21 +52,21 @@ class Skip {
         assert(this.skipRangeCb);

         const filteringResult = this.extension.filter(entry);
-        const skipTo = this.extension.skipping();
+        const skippingRange = this.extension.skipping();

         if (filteringResult === FILTER_END) {
             this.listingEndCb();
         } else if (filteringResult === FILTER_SKIP
-                && skipTo !== SKIP_NONE) {
+                && skippingRange !== SKIP_NONE) {
             if (++this.streakLength >= MAX_STREAK_LENGTH) {
                 let newRange;
-                if (Array.isArray(skipTo)) {
+                if (Array.isArray(skippingRange)) {
                     newRange = [];
-                    for (let i = 0; i < skipTo.length; ++i) {
-                        newRange.push(skipTo[i]);
+                    for (let i = 0; i < skippingRange.length; ++i) {
+                        newRange.push(this._inc(skippingRange[i]));
                     }
                 } else {
-                    newRange = skipTo;
+                    newRange = this._inc(skippingRange);
                 }
                 /* Avoid to loop on the same range again and again. */
                 if (newRange === this.gteParams) {
@ -79,6 +79,16 @@ class Skip {
                 this.streakLength = 0;
             }
         }
+
+    _inc(str) {
+        if (!str) {
+            return str;
+        }
+        const lastCharValue = str.charCodeAt(str.length - 1);
+        const lastCharNewValue = String.fromCharCode(lastCharValue + 1);
+
+        return `${str.slice(0, str.length - 1)}${lastCharNewValue}`;
+    }
 }
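Note: the _inc() helper added to Skip above bumps the last character of a range bound so the next listing starts strictly after the returned range. A standalone TypeScript sketch of the same logic:

function inc(str: string): string {
    if (!str) {
        return str;
    }
    const last = str.charCodeAt(str.length - 1);
    return `${str.slice(0, str.length - 1)}${String.fromCharCode(last + 1)}`;
}

// '/' + 1 === '0', so every key under 'photos/' sorts before 'photos0'
console.log(inc('photos/')); // 'photos0'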
@ -14,7 +14,7 @@ function vaultSignatureCb(
     err: Error | null,
     authInfo: { message: { body: any } },
     log: Logger,
-    callback: (err: Error | null, data?: any, results?: any, params?: any, infos?: any) => void,
+    callback: (err: Error | null, data?: any, results?: any, params?: any) => void,
     streamingV4Params?: any
 ) {
     // vaultclient API guarantees that it returns:
@ -38,9 +38,7 @@ function vaultSignatureCb(
     }
     // @ts-ignore
     log.addDefaultFields(auditLog);
-    return callback(null, userInfo, authorizationResults, streamingV4Params, {
-        accountQuota: info.accountQuota || {},
-    });
+    return callback(null, userInfo, authorizationResults, streamingV4Params);
 }

 export type AuthV4RequestParams = {
@ -386,19 +384,4 @@ export default class Vault {
             return callback(null, respBody);
         });
     }
-
-    report(log: Logger, callback: (err: Error | null, data?: any) => void) {
-        // call the report function of the client
-        if (!this.client.report) {
-            return callback(null, {});
-        }
-        // @ts-ignore
-        return this.client.report(log.getSerializedUids(), (err: Error | null, obj?: any) => {
-            if (err) {
-                log.debug(`error from ${this.implName}`, { error: err });
-                return callback(err);
-            }
-            return callback(null, obj);
-        });
-    }
 }
@ -9,12 +9,10 @@ import * as constants from '../constants';
 import constructStringToSignV2 from './v2/constructStringToSign';
 import constructStringToSignV4 from './v4/constructStringToSign';
 import { convertUTCtoISO8601 } from './v4/timeUtils';
-import * as vaultUtilities from './backends/in_memory/vaultUtilities';
-import * as inMemoryBackend from './backends/in_memory/Backend';
-import baseBackend from './backends/base';
-import chainBackend from './backends/ChainBackend';
-import validateAuthConfig from './backends/in_memory/validateAuthConfig';
-import AuthLoader from './backends/in_memory/AuthLoader';
+import * as vaultUtilities from './in_memory/vaultUtilities';
+import * as backend from './in_memory/Backend';
+import validateAuthConfig from './in_memory/validateAuthConfig';
+import AuthLoader from './in_memory/AuthLoader';
 import Vault from './Vault';

 let vault: Vault | null = null;
@ -163,20 +161,6 @@ function doAuth(
     return cb(errors.InternalError);
 }

-/**
- * This function will generate a version 4 content-md5 header
- * It looks at the request path to determine what kind of header encoding is required
- *
- * @param path - the request path
- * @param payload - the request payload to hash
- */
-function generateContentMD5Header(
-    path: string,
-    payload: string,
-) {
-    const encoding = path && path.startsWith('/_/backbeat/') ? 'hex' : 'base64';
-    return crypto.createHash('md5').update(payload, 'binary').digest(encoding);
-}
 /**
  * This function will generate a version 4 header
  *
@ -189,7 +173,6 @@ function generateContentMD5Header(
  * @param [proxyPath] - path that gets proxied by reverse proxy
  * @param [sessionToken] - security token if the access/secret keys
  * are temporary credentials from STS
- * @param [payload] - body of the request if any
  */
 function generateV4Headers(
     request: any,
@ -197,9 +180,8 @@ function generateV4Headers(
     accessKey: string,
     secretKeyValue: string,
     awsService: string,
-    proxyPath?: string,
-    sessionToken?: string,
-    payload?: string,
+    proxyPath: string,
+    sessionToken: string
 ) {
     Object.assign(request, { headers: {} });
     const amzDate = convertUTCtoISO8601(Date.now());
@ -212,7 +194,7 @@ function generateV4Headers(
     const timestamp = amzDate;
     const algorithm = 'AWS4-HMAC-SHA256';

-    payload = payload || '';
+    let payload = '';
     if (request.method === 'POST') {
         payload = queryString.stringify(data, undefined, undefined, {
             encodeURIComponent,
@ -223,7 +205,6 @@ function generateV4Headers(
     request.setHeader('host', request._headers.host);
     request.setHeader('x-amz-date', amzDate);
     request.setHeader('x-amz-content-sha256', payloadChecksum);
-    request.setHeader('content-md5', generateContentMD5Header(request.path, payload));

     if (sessionToken) {
         request.setHeader('x-amz-security-token', sessionToken);
@ -234,8 +215,7 @@ function generateV4Headers(
         .filter(headerName =>
             headerName.startsWith('x-amz-')
             || headerName.startsWith('x-scal-')
-            || headerName === 'content-md5'
-            || headerName === 'host',
+            || headerName === 'host'
         ).sort().join(';');
     const params = { request, signedHeaders, payloadChecksum,
         credentialScope, timestamp, query: data,
@ -256,8 +236,7 @@ function generateV4Headers(

 export const server = { extractParams, doAuth }
 export const client = { generateV4Headers, constructStringToSignV2 }
-export const inMemory = { backend: inMemoryBackend, validateAuthConfig, AuthLoader }
-export const backends = { baseBackend, chainBackend }
+export const inMemory = { backend, validateAuthConfig, AuthLoader }
 export {
     setAuthHandler as setHandler,
     AuthInfo,
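Note: the generateContentMD5Header() helper removed above picks the digest encoding from the request path: hex for backbeat routes, base64 otherwise. A standalone TypeScript sketch mirroring that behavior (paths are hypothetical):

import * as crypto from 'crypto';

function contentMD5(path: string, payload: string): string {
    const encoding = path && path.startsWith('/_/backbeat/') ? 'hex' : 'base64';
    return crypto.createHash('md5').update(payload, 'binary').digest(encoding);
}

console.log(contentMD5('/_/backbeat/metadata', '{}')); // hex digest
console.log(contentMD5('/bucket/key', '{}'));          // base64 digest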
@ -1,233 +0,0 @@
import assert from 'assert';
import async from 'async';
import errors from '../../errors';
import BaseBackend from './base';

/**
 * Class that provides an authentication backend that will verify signatures
 * and retrieve emails and canonical ids associated with an account using a
 * given list of authentication backends and vault clients.
 *
 * @class ChainBackend
 */
export default class ChainBackend extends BaseBackend {
    _clients: any[];

    /**
     * @constructor
     * @param {string} service - service id
     * @param {object[]} clients - list of authentication backends or vault clients
     */
    constructor(service: string, clients: any[]) {
        super(service);

        assert(Array.isArray(clients) && clients.length > 0, 'invalid client list');
        assert(clients.every(client =>
            typeof client.verifySignatureV4 === 'function' &&
            typeof client.verifySignatureV2 === 'function' &&
            typeof client.getCanonicalIds === 'function' &&
            typeof client.getEmailAddresses === 'function' &&
            typeof client.checkPolicies === 'function' &&
            typeof client.healthcheck === 'function',
        ), 'invalid client: missing required auth backend methods');
        this._clients = clients;
    }

    /*
     * try the task against each client until one succeeds
     */
    _tryEachClient(task: any, cb: any) {
        // @ts-ignore
        async.tryEach(this._clients.map(client => done => task(client, done)), cb);
    }

    /*
     * apply the task to all clients
     */
    _forEachClient(task: any, cb: any) {
        async.map(this._clients, task, cb);
    }

    verifySignatureV2(
        stringToSign: string,
        signatureFromRequest: string,
        accessKey: string,
        options: any,
        callback: any,
    ) {
        this._tryEachClient((client, done) => client.verifySignatureV2(
            stringToSign,
            signatureFromRequest,
            accessKey,
            options,
            done,
        ), callback);
    }

    verifySignatureV4(
        stringToSign: string,
        signatureFromRequest: string,
        accessKey: string,
        region: string,
        scopeDate: string,
        options: any,
        callback: any,
    ) {
        this._tryEachClient((client, done) => client.verifySignatureV4(
            stringToSign,
            signatureFromRequest,
            accessKey,
            region,
            scopeDate,
            options,
            done,
        ), callback);
    }

    static _mergeObjects(objectResponses: any) {
        return objectResponses.reduce(
            (retObj, resObj) => Object.assign(retObj, resObj.message.body),
            {});
    }

    getCanonicalIds(emailAddresses: string[], options: any, callback: any) {
        this._forEachClient(
            (client, done) => client.getCanonicalIds(emailAddresses, options, done),
            (err, res) => {
                if (err) {
                    return callback(err);
                }
                // TODO: atm naive merge, better handling of conflicting email results
                return callback(null, {
                    message: {
                        body: ChainBackend._mergeObjects(res),
                    },
                });
            });
    }

    getEmailAddresses(canonicalIDs: string[], options: any, callback: any) {
        this._forEachClient(
            (client, done) => client.getEmailAddresses(canonicalIDs, options, done),
            (err, res) => {
                if (err) {
                    return callback(err);
                }
                return callback(null, {
                    message: {
                        body: ChainBackend._mergeObjects(res),
                    },
                });
            });
    }

    /*
     * merge policy responses into a single message
     */
    static _mergePolicies(policyResponses: any) {
        const policyMap: any = {};

        policyResponses.forEach(resp => {
            if (!resp.message || !Array.isArray(resp.message.body)) {
                return;
            }

            const check = (policy) => {
                const key = (policy.arn || '') + (policy.versionId || '') + (policy.action || '');
                if (!policyMap[key] || !policyMap[key].isAllowed) {
                    policyMap[key] = policy;
                }
                // else it is a duplicate policy
            };

            resp.message.body.forEach(policy => {
                if (Array.isArray(policy)) {
                    policy.forEach(authResult => check(authResult));
                } else {
                    check(policy);
                }
            });
        });

        return Object.keys(policyMap).map(key => {
            const policyRes: any = { isAllowed: policyMap[key].isAllowed };
            if (policyMap[key].arn !== '') {
                policyRes.arn = policyMap[key].arn;
            }
            if (policyMap[key].versionId) {
                policyRes.versionId = policyMap[key].versionId;
            }
            if (policyMap[key].isImplicit !== undefined) {
                policyRes.isImplicit = policyMap[key].isImplicit;
            }
            if (policyMap[key].action) {
                policyRes.action = policyMap[key].action;
            }
            return policyRes;
        });
    }

    /*
        response format:
            { message: {
                body: [{}],
                code: number,
                message: string,
            } }
    */
    checkPolicies(requestContextParams: any, userArn: string, options: any, callback: any) {
        this._forEachClient((client, done) => client.checkPolicies(
            requestContextParams,
            userArn,
            options,
            done,
        ), (err, res) => {
            if (err) {
                return callback(err);
            }
            return callback(null, {
                message: {
                    body: ChainBackend._mergePolicies(res),
                },
            });
        });
    }

    healthcheck(reqUid: string, callback: any) {
        this._forEachClient((client, done) =>
            client.healthcheck(reqUid, (err, res) => done(null, {
                error: !!err ? err : null,
                status: res,
            }),
        ), (err, res) => {
            if (err) {
                return callback(err);
            }

            const isError = res.some(results => !!results.error);
            if (isError) {
                return callback(errors.InternalError, res);
            }
            return callback(null, res);
        });
    }

    report(reqUid: string, callback: any) {
        this._forEachClient((client, done) =>
            client.report(reqUid, done),
        (err, res) => {
            if (err) {
                return callback(err);
            }
            const mergedRes = res.reduce((acc, val) => {
                Object.keys(val).forEach(k => {
                    acc[k] = val[k];
                });
                return acc;
            }, {});

            return callback(null, mergedRes);
        });
    }
}

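Note: _tryEachClient() above relies on async.tryEach: tasks run one at a time, the first success wins, and an error is surfaced only if every task fails. A minimal standalone TypeScript sketch with two hypothetical clients:

import async from 'async';

type Cb = (err: Error | null, res?: string) => void;

const clients = [
    { check: (cb: Cb) => cb(new Error('denied')) },     // fails first
    { check: (cb: Cb) => cb(null, 'ok from client 2') }, // then succeeds
];

async.tryEach(
    clients.map(client => (done: Cb) => client.check(done)),
    (err, result) => console.log(err, result),           // null 'ok from client 2'
);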
@ -1,96 +0,0 @@
import errors from '../../errors';

/**
 * Base backend class
 *
 * @class BaseBackend
 */
export default class BaseBackend {
    service: string;

    /**
     * @constructor
     * @param {string} service - service identifier for constructing the arn
     */
    constructor(service: string) {
        this.service = service;
    }

    /** verifySignatureV2
     * @param stringToSign - string to sign built per AWS rules
     * @param signatureFromRequest - signature sent with request
     * @param accessKey - account accessKey
     * @param options - contains algorithm (SHA1 or SHA256)
     * @param callback - callback with either error or user info
     * @return calls callback
     */
    verifySignatureV2(
        stringToSign: string,
        signatureFromRequest: string,
        accessKey: string,
        options: any,
        callback: any
    ) {
        return callback(errors.AuthMethodNotImplemented);
    }

    /** verifySignatureV4
     * @param stringToSign - string to sign built per AWS rules
     * @param signatureFromRequest - signature sent with request
     * @param accessKey - account accessKey
     * @param region - region specified in request credential
     * @param scopeDate - date specified in request credential
     * @param options - options to send to Vault
     * (just contains reqUid for logging in Vault)
     * @param callback - callback with either error or user info
     * @return calls callback
     */
    verifySignatureV4(
        stringToSign: string,
        signatureFromRequest: string,
        accessKey: string,
        region: string,
        scopeDate: string,
        options: any,
        callback: any
    ) {
        return callback(errors.AuthMethodNotImplemented);
    }

    /**
     * Gets canonical IDs for a list of accounts
     * based on the email associated with each account
     * @param emails - list of email addresses
     * @param options - to send log id to vault
     * @param callback - callback to calling function
     * @returns callback with either error or
     * object with email addresses as keys and canonical IDs
     * as values
     */
    getCanonicalIds(emails: string[], options: any, callback: any) {
        return callback(errors.AuthMethodNotImplemented);
    }

    /**
     * Gets email addresses (referred to as display names for getACLs)
     * for a list of accounts based on the canonical IDs associated with each account
     * @param canonicalIDs - list of canonicalIDs
     * @param options - to send log id to vault
     * @param callback - callback to calling function
     * @returns callback with either error or
     * an object from Vault containing account canonicalID
     * as each object key and an email address as the value (or "NotFound")
     */
    getEmailAddresses(canonicalIDs: string[], options: any, callback: any) {
        return callback(errors.AuthMethodNotImplemented);
    }

    checkPolicies(requestContextParams: any, userArn: string, options: any, callback: any) {
        return callback(null, { message: { body: [] } });
    }

    healthcheck(reqUid: string, callback: any) {
        return callback(null, { code: 200, message: 'OK' });
    }
}

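Note: BaseBackend above is deliberately a stub: every auth method replies with AuthMethodNotImplemented so that concrete backends only override what they actually support. A minimal hypothetical subclass sketch (the import path is assumed):

import BaseBackend from './base';

class HealthOnlyBackend extends BaseBackend {
    constructor() {
        super('health-only');
    }

    // only healthcheck is overridden; signature checks still return
    // AuthMethodNotImplemented from the base class
    healthcheck(reqUid: string, callback: any) {
        return callback(null, { code: 200, message: 'OK (health-only)' });
    }
}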
@ -4,7 +4,7 @@ import joi from 'joi';
 import werelogs from 'werelogs';
 import * as types from './types';
 import { Account, Accounts } from './types';
-import ARN from '../../../models/ARN';
+import ARN from '../../models/ARN';

 /** Load authentication information from files or pre-loaded account objects */
 export default class AuthLoader {
@ -1,9 +1,7 @@
-import crypto from 'crypto';
-import { Logger } from 'werelogs';
-import errors from '../../../errors';
+import * as crypto from 'crypto';
+import errors from '../../errors';
 import { calculateSigningKey, hashSignature } from './vaultUtilities';
 import Indexer from './Indexer';
-import BaseBackend from '../base';
 import { Accounts } from './types';

 function _formatResponse(userInfoToSend: any) {
@ -17,32 +15,26 @@ function _formatResponse(userInfoToSend: any) {
 /**
  * Class that provides a memory backend for verifying signatures and getting
  * emails and canonical ids associated with an account.
- *
- * @class InMemoryBackend
  */
-class InMemoryBackend extends BaseBackend {
+class Backend {
     indexer: Indexer;
-    formatResponse: any;
+    service: string;

-    /**
-     * @constructor
-     * @param service - service identifier for constructing the arn
-     * @param indexer - indexer instance for retrieving account info
-     * @param formatter - function which accepts user info to send
-     * back and returns it in an object
-     */
-    constructor(service: string, indexer: Indexer, formatter: typeof _formatResponse) {
-        super(service);
+    constructor(service: string, indexer: Indexer) {
+        this.service = service;
         this.indexer = indexer;
-        this.formatResponse = formatter;
     }

+    // CODEQUALITY-TODO-SYNC Should be synchronous
     verifySignatureV2(
         stringToSign: string,
         signatureFromRequest: string,
         accessKey: string,
-        options: any,
-        callback: any,
+        options: { algo: 'SHA256' | 'SHA1' },
+        callback: (
+            error: Error | null,
+            data?: ReturnType<typeof _formatResponse>
+        ) => void
     ) {
         const entity = this.indexer.getEntityByKey(accessKey);
         if (!entity) {
@ -58,21 +50,27 @@ class InMemoryBackend extends BaseBackend {
|
||||||
accountDisplayName: this.indexer.getAcctDisplayName(entity),
|
accountDisplayName: this.indexer.getAcctDisplayName(entity),
|
||||||
canonicalID: entity.canonicalID,
|
canonicalID: entity.canonicalID,
|
||||||
arn: entity.arn,
|
arn: entity.arn,
|
||||||
|
// TODO Why?
|
||||||
// @ts-ignore
|
// @ts-ignore
|
||||||
IAMdisplayName: entity.IAMdisplayName,
|
IAMdisplayName: entity.IAMdisplayName,
|
||||||
};
|
};
|
||||||
const vaultReturnObject = this.formatResponse(userInfoToSend);
|
const vaultReturnObject = _formatResponse(userInfoToSend);
|
||||||
return callback(null, vaultReturnObject);
|
return callback(null, vaultReturnObject);
|
||||||
}
|
}
|
||||||
|
|
||||||
|
// TODO Options not used. Why ?
|
||||||
|
// CODEQUALITY-TODO-SYNC Should be synchronous
|
||||||
verifySignatureV4(
|
verifySignatureV4(
|
||||||
stringToSign: string,
|
stringToSign: string,
|
||||||
signatureFromRequest: string,
|
signatureFromRequest: string,
|
||||||
accessKey: string,
|
accessKey: string,
|
||||||
region: string,
|
region: string,
|
||||||
scopeDate: string,
|
scopeDate: string,
|
||||||
options: any,
|
_options: { algo: 'SHA256' | 'SHA1' },
|
||||||
callback: any,
|
callback: (
|
||||||
|
err: Error | null,
|
||||||
|
data?: ReturnType<typeof _formatResponse>
|
||||||
|
) => void
|
||||||
) {
|
) {
|
||||||
const entity = this.indexer.getEntityByKey(accessKey);
|
const entity = this.indexer.getEntityByKey(accessKey);
|
||||||
if (!entity) {
|
if (!entity) {
|
||||||
|
@ -89,14 +87,21 @@ class InMemoryBackend extends BaseBackend {
|
||||||
accountDisplayName: this.indexer.getAcctDisplayName(entity),
|
accountDisplayName: this.indexer.getAcctDisplayName(entity),
|
||||||
canonicalID: entity.canonicalID,
|
canonicalID: entity.canonicalID,
|
||||||
arn: entity.arn,
|
arn: entity.arn,
|
||||||
|
// TODO Why?
|
||||||
// @ts-ignore
|
// @ts-ignore
|
||||||
IAMdisplayName: entity.IAMdisplayName,
|
IAMdisplayName: entity.IAMdisplayName,
|
||||||
};
|
};
|
||||||
const vaultReturnObject = this.formatResponse(userInfoToSend);
|
const vaultReturnObject = _formatResponse(userInfoToSend);
|
||||||
return callback(null, vaultReturnObject);
|
return callback(null, vaultReturnObject);
|
||||||
}
|
}
|
||||||
|
|
||||||
getCanonicalIds(emails: string[], log: Logger, cb: any) {
|
// TODO log not used. Why ?
|
||||||
|
// CODEQUALITY-TODO-SYNC Should be synchronous
|
||||||
|
getCanonicalIds(
|
||||||
|
emails: string[],
|
||||||
|
_log: any,
|
||||||
|
cb: (err: null, data: { message: { body: any } }) => void
|
||||||
|
) {
|
||||||
const results = {};
|
const results = {};
|
||||||
emails.forEach(email => {
|
emails.forEach(email => {
|
||||||
const lowercasedEmail = email.toLowerCase();
|
const lowercasedEmail = email.toLowerCase();
|
||||||
|
@ -116,7 +121,13 @@ class InMemoryBackend extends BaseBackend {
|
||||||
return cb(null, vaultReturnObject);
|
return cb(null, vaultReturnObject);
|
||||||
}
|
}
|
||||||
|
|
||||||
getEmailAddresses(canonicalIDs: string[], options: any, cb: any) {
|
// TODO options not used. Why ?
|
||||||
|
// CODEQUALITY-TODO-SYNC Should be synchronous
|
||||||
|
getEmailAddresses(
|
||||||
|
canonicalIDs: string[],
|
||||||
|
_options: any,
|
||||||
|
cb: (err: null, data: { message: { body: any } }) => void
|
||||||
|
) {
|
||||||
const results = {};
|
const results = {};
|
||||||
canonicalIDs.forEach(canonicalId => {
|
canonicalIDs.forEach(canonicalId => {
|
||||||
const foundEntity = this.indexer.getEntityByCanId(canonicalId);
|
const foundEntity = this.indexer.getEntityByCanId(canonicalId);
|
||||||
|
@ -134,17 +145,24 @@ class InMemoryBackend extends BaseBackend {
|
||||||
return cb(null, vaultReturnObject);
|
return cb(null, vaultReturnObject);
|
||||||
}
|
}
|
||||||
|
|
||||||
|
// TODO options not used. Why ?
|
||||||
|
// CODEQUALITY-TODO-SYNC Should be synchronous
|
||||||
/**
|
/**
|
||||||
* Gets accountIds for a list of accounts based on
|
* Gets accountIds for a list of accounts based on
|
||||||
* the canonical IDs associated with the account
|
* the canonical IDs associated with the account
|
||||||
* @param canonicalIDs - list of canonicalIDs
|
* @param canonicalIDs - list of canonicalIDs
|
||||||
* @param options - to send log id to vault
|
* @param _options - to send log id to vault
|
||||||
* @param cb - callback to calling function
|
* @param cb - callback to calling function
|
||||||
* @returns callback with either error or
|
* @returns The next is wrong. Here to keep archives.
|
||||||
|
* callback with either error or
|
||||||
* an object from Vault containing account canonicalID
|
* an object from Vault containing account canonicalID
|
||||||
* as each object key and an accountId as the value (or "NotFound")
|
* as each object key and an accountId as the value (or "NotFound")
|
||||||
*/
|
*/
|
||||||
getAccountIds(canonicalIDs: string[], options: any, cb: any) {
|
getAccountIds(
|
||||||
|
canonicalIDs: string[],
|
||||||
|
_options: any,
|
||||||
|
cb: (err: null, data: { message: { body: any } }) => void
|
||||||
|
) {
|
||||||
const results = {};
|
const results = {};
|
||||||
canonicalIDs.forEach(canonicalID => {
|
canonicalIDs.forEach(canonicalID => {
|
||||||
const foundEntity = this.indexer.getEntityByCanId(canonicalID);
|
const foundEntity = this.indexer.getEntityByCanId(canonicalID);
|
||||||
|
@ -161,34 +179,16 @@ class InMemoryBackend extends BaseBackend {
|
||||||
};
|
};
|
||||||
return cb(null, vaultReturnObject);
|
return cb(null, vaultReturnObject);
|
||||||
}
|
}
|
||||||
|
|
||||||
report(log: Logger, callback: any) {
|
|
||||||
return callback(null, {});
|
|
||||||
}
|
|
||||||
}
|
}
|
||||||
|
|
||||||
|
class S3AuthBackend extends Backend {
|
||||||
class S3AuthBackend extends InMemoryBackend {
|
constructor(authdata: Accounts) {
|
||||||
/**
|
super('s3', new Indexer(authdata));
|
||||||
* @constructor
|
|
||||||
* @param authdata - the authentication config file's data
|
|
||||||
* @param authdata.accounts - array of account objects
|
|
||||||
* @param authdata.accounts[].name - account name
|
|
||||||
* @param authdata.accounts[].email - account email
|
|
||||||
* @param authdata.accounts[].arn - IAM resource name
|
|
||||||
* @param authdata.accounts[].canonicalID - account canonical ID
|
|
||||||
* @param authdata.accounts[].shortid - short account ID
|
|
||||||
* @param authdata.accounts[].keys - array of key objects
|
|
||||||
* @param authdata.accounts[].keys[].access - access key
|
|
||||||
* @param authdata.accounts[].keys[].secret - secret key
|
|
||||||
*/
|
|
||||||
constructor(authdata?: Accounts) {
|
|
||||||
super('s3', new Indexer(authdata), _formatResponse);
|
|
||||||
}
|
}
|
||||||
|
|
||||||
refreshAuthData(authData?: Accounts) {
|
refreshAuthData(authData: Accounts) {
|
||||||
this.indexer = new Indexer(authData);
|
this.indexer = new Indexer(authData);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
export { S3AuthBackend as s3 }
|
export { S3AuthBackend as s3 };
|
|
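Taken together, the hunks above swap the `BaseBackend` inheritance for a standalone `Backend` class that formats responses through the module-local `_formatResponse`. For orientation, a minimal usage sketch follows; the account fields come from the JSDoc removed above, while the values and the import path are illustrative assumptions:

```ts
// Sketch only: field names follow the removed JSDoc, values are invented.
import { s3 as S3AuthBackend } from './Backend'; // illustrative path

const authdata = {
    accounts: [{
        name: 'test-account',
        email: 'test.account@example.com',
        arn: 'arn:aws:iam::123456789012:root',
        canonicalID: '79a59df900b949e55d96a1e698fbaced',
        shortid: '123456789012',
        keys: [{ access: 'accessKey1', secret: 'verySecretKey1' }],
    }],
};

const backend = new S3AuthBackend(authdata);

// Resolve an email to its canonical ID; unknown emails map to "NotFound".
backend.getCanonicalIds(['test.account@example.com'], null, (err, data) => {
    console.log(data?.message.body);
});
```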
@@ -42,40 +42,37 @@ export default function awsURIencode(
     if (typeof input !== 'string') {
         return '';
     }
-    let encoded = "";
+    // precalc slash and star based on configs
     const slash = encodeSlash === undefined || encodeSlash ? '%2F' : '/';
     const star = noEncodeStar !== undefined && noEncodeStar ? '*' : '%2A';
-    for (let i = 0; i < input.length; i++) {
-        let ch = input.charAt(i);
-        if ((ch >= 'A' && ch <= 'Z') ||
-            (ch >= 'a' && ch <= 'z') ||
-            (ch >= '0' && ch <= '9') ||
-            ch === '_' || ch === '-' ||
-            ch === '~' || ch === '.') {
-            encoded = encoded.concat(ch);
-        } else if (ch === ' ') {
-            encoded = encoded.concat('%20');
-        } else if (ch === '/') {
-            encoded = encoded.concat(slash);
-        } else if (ch === '*') {
-            encoded = encoded.concat(star);
-        } else {
-            if (ch >= '\uD800' && ch <= '\uDBFF') {
-                // If this character is a high surrogate peek the next character
-                // and join it with this one if the next character is a low
-                // surrogate.
-                // Otherwise the encoded URI will contain the two surrogates as
-                // two distinct UTF-8 sequences which is not valid UTF-8.
-                if (i + 1 < input.length) {
-                    const ch2 = input.charAt(i + 1);
-                    if (ch2 >= '\uDC00' && ch2 <= '\uDFFF') {
-                        i++;
-                        ch += ch2;
-                    }
-                }
-            }
-            encoded = encoded.concat(_toHexUTF8(ch));
-        }
-    }
-    return encoded;
+    const encoded: string[] = [];
+
+    const charArray = Array.from(input);
+    for (const ch of charArray) {
+        switch (true) {
+        case ch >= 'A' && ch <= 'Z':
+        case ch >= 'a' && ch <= 'z':
+        case ch >= '0' && ch <= '9':
+        case ch === '-':
+        case ch === '_':
+        case ch === '~':
+        case ch === '.':
+            encoded.push(ch);
+            break;
+        case ch === '/':
+            encoded.push(slash);
+            break;
+        case ch === '*':
+            encoded.push(star);
+            break;
+        case ch === ' ':
+            encoded.push('%20');
+            break;
+        default:
+            encoded.push(_toHexUTF8(ch));
+            break;
+        }
+    }
+    return encoded.join('');
 }
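The comment deleted on the left explains why the old loop peeked ahead for a low surrogate. The new `Array.from`/`for...of` version gets the same guarantee for free, since string iteration yields whole code points. A standalone illustration (not repository code):

```ts
// One emoji is a single code point but two UTF-16 code units.
const smiley = '\u{1F600}';
console.log(smiley.length);             // 2: charAt() would see two halves
console.log(Array.from(smiley).length); // 1: iteration joins the pair
// Hex-encoding each half separately would produce invalid UTF-8;
// encoding the joined pair yields the correct 4-byte sequence:
console.log(encodeURIComponent(smiley)); // '%F0%9F%98%80'
```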
@@ -132,17 +132,6 @@ export function check(
         return { err: errors.RequestTimeTooSkewed };
     }

-    let proxyPath: string | undefined;
-    if (request.headers.proxy_path) {
-        try {
-            proxyPath = decodeURIComponent(request.headers.proxy_path);
-        } catch (err) {
-            log.debug('invalid proxy_path header', { proxyPath, err });
-            return { err: errors.InvalidArgument.customizeDescription(
-                'invalid proxy_path header') };
-        }
-    }
-
     const stringToSign = constructStringToSign({
         log,
         request,
@@ -152,7 +141,6 @@ export function check(
         timestamp,
         payloadChecksum,
         awsService: service,
-        proxyPath,
     });
     log.trace('constructed stringToSign', { stringToSign });
     if (stringToSign instanceof Error) {
@@ -56,17 +56,6 @@ export function check(request: any, log: Logger, data: { [key: string]: string }
         return { err: errors.RequestTimeTooSkewed };
     }

-    let proxyPath: string | undefined;
-    if (request.headers.proxy_path) {
-        try {
-            proxyPath = decodeURIComponent(request.headers.proxy_path);
-        } catch (err) {
-            log.debug('invalid proxy_path header', { proxyPath });
-            return { err: errors.InvalidArgument.customizeDescription(
-                'invalid proxy_path header') };
-        }
-    }
-
     // In query v4 auth, the canonical request needs
     // to include the query params OTHER THAN
     // the signature so create a
@@ -92,7 +81,6 @@ export function check(request: any, log: Logger, data: { [key: string]: string }
         credentialScope:
             `${scopeDate}/${region}/${service}/${requestType}`,
         awsService: service,
-        proxyPath,
     });
     if (stringToSign instanceof Error) {
         return { err: stringToSign };
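The two hunks above remove the same `proxy_path` handling from both the header and the query v4 checks, so a malformed header no longer maps to `InvalidArgument` on this branch. A standalone illustration of what the removed guard protected against:

```ts
// decodeURIComponent throws URIError on malformed percent-encoding,
// which is why the removed block wrapped it in try/catch.
try {
    decodeURIComponent('%E0%A4%A'); // truncated escape sequence
} catch (err) {
    console.log(err instanceof URIError); // true
}
```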
@@ -3,7 +3,7 @@ import async from 'async';
 import errors from '../../../errors';
 import { Logger } from 'werelogs';
 import Vault, { AuthV4RequestParams } from '../../Vault';
-import { Callback } from '../../backends/in_memory/types';
+import { Callback } from '../../in_memory/types';

 import constructChunkStringToSign from './constructChunkStringToSign';

@@ -1,569 +0,0 @@
-import cluster, { Worker } from 'cluster';
-import * as werelogs from 'werelogs';
-
-import { default as errors } from '../../lib/errors';
-
-const rpcLogger = new werelogs.Logger('ClusterRPC');
-
-/**
- * Remote procedure calls support between cluster workers.
- *
- * When using the cluster module, new processes are forked and are
- * dispatched workloads, usually HTTP requests. The ClusterRPC module
- * implements a RPC system to send commands to all cluster worker
- * processes at once from any particular worker, and retrieve their
- * individual command results, like a distributed map operation.
- *
- * The existing nodejs cluster IPC channel is setup from the primary
- * to each worker, but not between workers, so there has to be a hop
- * by the primary.
- *
- * How a command is treated:
- *
- * - a worker sends a command message to the primary
- *
- * - the primary then forwards that command to each existing worker
- *   (including the requestor)
- *
- * - each worker then executes the command and returns a result or an
- *   error
- *
- * - the primary gathers all workers results into an array
- *
- * - finally, the primary dispatches the results array to the original
- *   requesting worker
- *
- *
- * Limitations:
- *
- * - The command payload must be serializable, which means that:
- *   - it should not contain circular references
- *   - it should be of a reasonable size to be sent in a single RPC message
- *
- * - The "toWorkers" parameter of value "*" targets the set of workers
- *   that are available at the time the command is dispatched. Any new
- *   worker spawned after the command has been dispatched for
- *   processing, but before the command completes, don't execute
- *   the command and hence are not part of the results array.
- *
- *
- * To set it up:
- *
- * - On the primary:
- *   if (cluster.isPrimary) {
- *       setupRPCPrimary();
- *   }
- *
- * - On the workers:
- *   if (!cluster.isPrimary) {
- *       setupRPCWorker({
- *           handler1: (payload: object, uids: string, callback: HandlerCallback) => void,
- *           handler2: ...
- *       });
- *   }
- *   Handler functions will be passed the command payload, request
- *   serialized uids, and must call the callback when the worker is done
- *   processing the command:
- *     callback(error: Error | null | undefined, result?: any)
- *
- * When this setup is done, any worker can start sending commands by calling
- * the async function sendWorkerCommand().
- */
-
-// exported types
-
-export type ResultObject = {
-    error: Error | null;
-    result: any;
-};
-
-/**
- * saved Promise for sendWorkerCommand
- */
-export type CommandPromise = {
-    resolve: (results?: ResultObject[]) => void;
-    reject: (error: Error) => void;
-    timeout: NodeJS.Timeout | null;
-};
-export type HandlerCallback = (error: (Error & { code?: number }) | null | undefined, result?: any) => void;
-export type HandlerFunction = (payload: object, uids: string, callback: HandlerCallback) => void;
-export type HandlersMap = {
-    [index: string]: HandlerFunction;
-};
-export type PrimaryHandlerFunction = (worker: Worker, payload: object, uids: string, callback: HandlerCallback) => void;
-export type PrimaryHandlersMap = Record<string, PrimaryHandlerFunction>;
-
-// private types
-
-type RPCMessage<T extends string, P> = {
-    type: T;
-    uids: string;
-    payload: P;
-};
-
-type RPCCommandMessage = RPCMessage<'cluster-rpc:command', any> & {
-    toWorkers: string;
-    toHandler: string;
-};
-
-type MarshalledResultObject = {
-    error: string | null;
-    errorCode?: number;
-    result: any;
-};
-
-type RPCCommandResultMessage = RPCMessage<'cluster-rpc:commandResult', MarshalledResultObject>;
-
-type RPCCommandResultsMessage = RPCMessage<'cluster-rpc:commandResults', {
-    results: MarshalledResultObject[];
-}>;
-
-type RPCCommandErrorMessage = RPCMessage<'cluster-rpc:commandError', {
-    error: string;
-}>;
-
-interface RPCSetupOptions {
-    /**
-     * As werelogs is not a peerDependency, arsenal and a parent project
-     * might have their own separate versions duplicated in dependencies.
-     * The config are therefore not shared.
-     * Use this to propagate werelogs config to arsenal's ClusterRPC.
-     */
-    werelogsConfig?: Parameters<typeof werelogs.configure>[0];
-};
-
-/**
- * In primary: store worker IDs that are waiting to be dispatched
- * their command's results, as a mapping.
- */
-const uidsToWorkerId: {
-    [index: string]: number;
-} = {};
-
-
-/**
- * In primary: store worker responses for commands in progress as a
- * mapping.
- *
- * Result objects are 'null' while the worker is still processing the
- * command. When a worker finishes processing it stores the result as:
- *     {
- *       error: string | null,
- *       result: any
- *     }
- */
-const uidsToCommandResults: {
-    [index: string]: {
-        [index: number]: MarshalledResultObject | null;
-    };
-} = {};
-
-/**
- * In workers: store promise callbacks for commands waiting to be
- * dispatched, as a mapping.
- */
-const uidsToCommandPromise: {
-    [index: string]: CommandPromise;
-} = {};
-
-
-function _isRpcMessage(message) {
-    return (message !== null &&
-        typeof message === 'object' &&
-        typeof message.type === 'string' &&
-        message.type.startsWith('cluster-rpc:'));
-}
-
-/**
- * Setup cluster RPC system on the primary
- *
- * @param {object} [handlers] - mapping of handler names to handler functions
- *     handler function:
- *         `handler({Worker} worker, {object} payload, {string} uids, {function} callback)`
- *     handler callback must be called when worker is done with the command:
- *         `callback({Error|null} error, {any} [result])`
- * @return {undefined}
- */
-export function setupRPCPrimary(handlers?: PrimaryHandlersMap, options?: RPCSetupOptions) {
-    if (options?.werelogsConfig) {
-        werelogs.configure(options.werelogsConfig);
-    }
-    cluster.on('message', (worker, message) => {
-        if (_isRpcMessage(message)) {
-            _handlePrimaryMessage(worker, message, handlers);
-        }
-    });
-}
-
-/**
- * Setup RPCs on a cluster worker process
- *
- * @param {object} handlers - mapping of handler names to handler functions
- *     handler function:
- *         handler({object} payload, {string} uids, {function} callback)
- *     handler callback must be called when worker is done with the command:
- *         callback({Error|null} error, {any} [result])
- * @return {undefined}
- * }
- */
-export function setupRPCWorker(handlers: HandlersMap, options?: RPCSetupOptions) {
-    if (!process.send) {
-        throw new Error('fatal: cannot setup cluster RPC: "process.send" is not available');
-    }
-    if (options?.werelogsConfig) {
-        werelogs.configure(options.werelogsConfig);
-    }
-    process.on('message', (message: RPCCommandMessage | RPCCommandResultsMessage) => {
-        if (_isRpcMessage(message)) {
-            _handleWorkerMessage(message, handlers);
-        }
-    });
-}
-
-/**
- * Send a command for workers to execute in parallel, and wait for results
- *
- * @param {string} toWorkers - which workers should execute the command
- *     Currently the supported values are:
- *     - "*", meaning all workers will execute the command
- *     - "PRIMARY", meaning primary process will execute the command
- * @param {string} toHandler - name of handler that will execute the
- * command in workers, as declared in setupRPCWorker() parameter object
- * @param {string} uids - unique identifier of the command, must be
- * unique across all commands in progress
- * @param {object} payload - message payload, sent as-is to the handler
- * @param {number} [timeoutMs=60000] - timeout the command with a
- * "RequestTimeout" error after this number of milliseconds - set to 0
- * to disable timeouts (the command may then hang forever)
- * @returns {Promise}
- */
-export async function sendWorkerCommand(
-    toWorkers: string,
-    toHandler: string,
-    uids: string,
-    payload: object,
-    timeoutMs: number = 60000
-) {
-    if (typeof uids !== 'string') {
-        rpcLogger.error('missing or invalid "uids" field', { uids });
-        throw errors.MissingParameter;
-    }
-    if (uidsToCommandPromise[uids] !== undefined) {
-        rpcLogger.error('a command is already in progress with same uids', { uids });
-        throw errors.OperationAborted;
-    }
-    rpcLogger.info('sending command', { toWorkers, toHandler, uids, payload });
-    return new Promise((resolve, reject) => {
-        let timeout: NodeJS.Timeout | null = null;
-        if (timeoutMs) {
-            timeout = setTimeout(() => {
-                delete uidsToCommandPromise[uids];
-                reject(errors.RequestTimeout);
-            }, timeoutMs);
-        }
-        uidsToCommandPromise[uids] = { resolve, reject, timeout };
-        const message: RPCCommandMessage = {
-            type: 'cluster-rpc:command',
-            toWorkers,
-            toHandler,
-            uids,
-            payload,
-        };
-        return process.send?.(message);
-    });
-}
-
-/**
- * Get the number of commands in flight
- * @returns {number}
- */
-export function getPendingCommandsCount() {
-    return Object.keys(uidsToCommandPromise).length;
-}
-
-
-function _dispatchCommandResultsToWorker(
-    worker: Worker,
-    uids: string,
-    resultsArray: MarshalledResultObject[]
-): void {
-    const message: RPCCommandResultsMessage = {
-        type: 'cluster-rpc:commandResults',
-        uids,
-        payload: {
-            results: resultsArray,
-        },
-    };
-    worker.send(message);
-}
-
-function _dispatchCommandErrorToWorker(
-    worker: Worker,
-    uids: string,
-    error: Error,
-): void {
-    const message: RPCCommandErrorMessage = {
-        type: 'cluster-rpc:commandError',
-        uids,
-        payload: {
-            error: error.message,
-        },
-    };
-    worker.send(message);
-}
-
-function _sendPrimaryCommandResult(
-    worker: Worker,
-    uids: string,
-    error: (Error & { code?: number }) | null | undefined,
-    result?: any
-): void {
-    const message: RPCCommandResultsMessage = {
-        type: 'cluster-rpc:commandResults',
-        uids,
-        payload: {
-            results: [{ error: error?.message || null, errorCode: error?.code, result }],
-        },
-    };
-    worker.send?.(message);
-}
-
-function _handlePrimaryCommandMessage(
-    fromWorker: Worker,
-    logger: any,
-    message: RPCCommandMessage,
-    handlers?: PrimaryHandlersMap
-): void {
-    const { toWorkers, toHandler, uids, payload } = message;
-    if (toWorkers === '*') {
-        if (uidsToWorkerId[uids] !== undefined) {
-            logger.warn('new command already has a waiting worker with same uids', {
-                uids, workerId: uidsToWorkerId[uids],
-            });
-            return undefined;
-        }
-        const commandResults = {};
-        for (const workerId of Object.keys(cluster.workers || {})) {
-            commandResults[workerId] = null;
-        }
-        uidsToWorkerId[uids] = fromWorker?.id;
-        uidsToCommandResults[uids] = commandResults;
-
-        for (const [workerId, worker] of Object.entries(cluster.workers || {})) {
-            logger.debug('sending command message to worker', {
-                workerId, toHandler, payload,
-            });
-            if (worker) {
-                worker.send(message);
-            }
-        }
-    } else if (toWorkers === 'PRIMARY') {
-        const { toHandler, uids, payload } = message;
-        const cb: HandlerCallback = (err, result) => _sendPrimaryCommandResult(fromWorker, uids, err, result);
-
-        if (toHandler in (handlers || {})) {
-            return handlers![toHandler](fromWorker, payload, uids, cb);
-        }
-        logger.error('no such handler in "toHandler" field from worker command message', {
-            toHandler,
-        });
-        return cb(errors.NotImplemented);
-    } else {
-        logger.error('unsupported "toWorkers" field from worker command message', {
-            toWorkers,
-        });
-        if (fromWorker) {
-            _dispatchCommandErrorToWorker(fromWorker, uids, errors.NotImplemented);
-        }
-    }
-}
-
-function _handlePrimaryCommandResultMessage(
-    fromWorkerId: number,
-    logger: any,
-    message: RPCCommandResultMessage
-): void {
-    const { uids, payload } = message;
-    const commandResults = uidsToCommandResults[uids];
-    if (!commandResults) {
-        logger.warn('received command response message from worker for command not in flight', {
-            workerId: fromWorkerId,
-            uids,
-        });
-        return undefined;
-    }
-    if (commandResults[fromWorkerId] === undefined) {
-        logger.warn('received command response message with unexpected worker ID', {
-            workerId: fromWorkerId,
-            uids,
-        });
-        return undefined;
-    }
-    if (commandResults[fromWorkerId] !== null) {
-        logger.warn('ignoring duplicate command response from worker', {
-            workerId: fromWorkerId,
-            uids,
-        });
-        return undefined;
-    }
-    commandResults[fromWorkerId] = payload;
-    const commandResultsArray = Object.values(commandResults);
-    if (commandResultsArray.every(response => response !== null)) {
-        logger.debug('all workers responded to command', { uids });
-        const completeCommandResultsArray = <MarshalledResultObject[]> commandResultsArray;
-        const toWorkerId = uidsToWorkerId[uids];
-        const toWorker = cluster.workers?.[toWorkerId];
-
-        delete uidsToCommandResults[uids];
-        delete uidsToWorkerId[uids];
-
-        if (!toWorker) {
-            logger.warn('worker shut down while its command was executing', {
-                workerId: toWorkerId, uids,
-            });
-            return undefined;
-        }
-        // send back response to original worker
-        _dispatchCommandResultsToWorker(toWorker, uids, completeCommandResultsArray);
-    }
-}
-
-function _handlePrimaryMessage(
-    fromWorker: Worker,
-    message: RPCCommandMessage | RPCCommandResultMessage,
-    handlers?: PrimaryHandlersMap
-): void {
-    const { type: messageType, uids } = message;
-    const logger = rpcLogger.newRequestLoggerFromSerializedUids(uids);
-    logger.debug('primary received message from worker', {
-        workerId: fromWorker?.id, rpcMessage: message,
-    });
-    if (messageType === 'cluster-rpc:command') {
-        return _handlePrimaryCommandMessage(fromWorker, logger, message, handlers);
-    }
-    if (messageType === 'cluster-rpc:commandResult') {
-        return _handlePrimaryCommandResultMessage(fromWorker?.id, logger, message);
-    }
-    logger.error('unsupported message type', {
-        workerId: fromWorker?.id, messageType, uids,
-    });
-    return undefined;
-}
-
-function _sendWorkerCommandResult(
-    uids: string,
-    error: Error | null | undefined,
-    result?: any
-): void {
-    const message: RPCCommandResultMessage = {
-        type: 'cluster-rpc:commandResult',
-        uids,
-        payload: {
-            error: error ? error.message : null,
-            result,
-        },
-    };
-    process.send?.(message);
-}
-
-function _handleWorkerCommandMessage(
-    logger: any,
-    message: RPCCommandMessage,
-    handlers: HandlersMap
-): void {
-    const { toHandler, uids, payload } = message;
-    const cb: HandlerCallback = (err, result) => _sendWorkerCommandResult(uids, err, result);
-
-    if (toHandler in handlers) {
-        return handlers[toHandler](payload, uids, cb);
-    }
-    logger.error('no such handler in "toHandler" field from worker command message', {
-        toHandler,
-    });
-    return cb(errors.NotImplemented);
-}
-
-function _handleWorkerCommandResultsMessage(
-    logger: any,
-    message: RPCCommandResultsMessage,
-): void {
-    const { uids, payload } = message;
-    const { results } = payload;
-    const commandPromise: CommandPromise = uidsToCommandPromise[uids];
-    if (commandPromise === undefined) {
-        logger.error('missing promise for command results', { uids, payload });
-        return undefined;
-    }
-    if (commandPromise.timeout) {
-        clearTimeout(commandPromise.timeout);
-    }
-    delete uidsToCommandPromise[uids];
-    const unmarshalledResults = results.map(workerResult => {
-        let workerError: Error | null = null;
-        if (workerResult.error) {
-            if (workerResult.error in errors) {
-                workerError = errors[workerResult.error];
-            } else {
-                workerError = new Error(workerResult.error);
-            }
-        }
-        if (workerError && workerResult.errorCode) {
-            (workerError as Error & { code: number }).code = workerResult.errorCode;
-        }
-        const unmarshalledResult: ResultObject = {
-            error: workerError,
-            result: workerResult.result,
-        };
-        return unmarshalledResult;
-    });
-    return commandPromise.resolve(unmarshalledResults);
-}
-
-function _handleWorkerCommandErrorMessage(
-    logger: any,
-    message: RPCCommandErrorMessage,
-): void {
-    const { uids, payload } = message;
-    const { error } = payload;
-    const commandPromise: CommandPromise = uidsToCommandPromise[uids];
-    if (commandPromise === undefined) {
-        logger.error('missing promise for command results', { uids, payload });
-        return undefined;
-    }
-    if (commandPromise.timeout) {
-        clearTimeout(commandPromise.timeout);
-    }
-    delete uidsToCommandPromise[uids];
-    let commandError: Error | null = null;
-    if (error in errors) {
-        commandError = errors[error];
-    } else {
-        commandError = new Error(error);
-    }
-    return commandPromise.reject(<Error> commandError);
-}
-
-function _handleWorkerMessage(
-    message: RPCCommandMessage | RPCCommandResultsMessage | RPCCommandErrorMessage,
-    handlers: HandlersMap
-): void {
-    const { type: messageType, uids } = message;
-    const workerId = cluster.worker?.id;
-    const logger = rpcLogger.newRequestLoggerFromSerializedUids(uids);
-    logger.debug('worker received message from primary', {
-        workerId, rpcMessage: message,
-    });
-    if (messageType === 'cluster-rpc:command') {
-        return _handleWorkerCommandMessage(logger, message, handlers);
-    }
-    if (messageType === 'cluster-rpc:commandResults') {
-        return _handleWorkerCommandResultsMessage(logger, message);
-    }
-    if (messageType === 'cluster-rpc:commandError') {
-        return _handleWorkerCommandErrorMessage(logger, message);
-    }
-    logger.error('unsupported message type', {
-        workerId, messageType,
-    });
-    return undefined;
-}
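The header comment of the deleted module documents the whole command round-trip: a worker sends to the primary, the primary fans out to every worker, then gathers the results and dispatches them back to the requestor. A minimal wiring sketch assembled from that comment and the removed signatures (handler name, worker count, and import path are illustrative):

```ts
import cluster from 'cluster';
import { setupRPCPrimary, setupRPCWorker, sendWorkerCommand } from './ClusterRPC';

if (cluster.isPrimary) {
    setupRPCPrimary();
    for (let i = 0; i < 4; i += 1) {
        cluster.fork();
    }
} else {
    setupRPCWorker({
        // each worker answers with its own PID
        getPid: (_payload, _uids, callback) => callback(null, process.pid),
    });
    // fan the command out to every worker via the primary, then await results
    sendWorkerCommand('*', 'getPid', `pid-probe-${process.pid}`, {})
        .then((results: any) => console.log(results.map((r: any) => r.result)));
}
```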
@@ -2,18 +2,18 @@ import * as crypto from 'crypto';

 // The min value here is to manage further backward compat if we
 // need it
-// Default value
-export const vaultGeneratedIamSecurityTokenSizeMin = 128;
-// Safe to assume that a typical token size is less than 8192 bytes
-export const vaultGeneratedIamSecurityTokenSizeMax = 8192;
-// Base-64
-export const vaultGeneratedIamSecurityTokenPattern = /^[A-Za-z0-9/+=]*$/;
+const iamSecurityTokenSizeMin = 128;
+const iamSecurityTokenSizeMax = 128;
+// Security token is an hex string (no real format from amazon)
+const iamSecurityTokenPattern = new RegExp(
+    `^[a-f0-9]{${iamSecurityTokenSizeMin},${iamSecurityTokenSizeMax}}$`,
+);

 // info about the iam security token
 export const iamSecurityToken = {
-    min: vaultGeneratedIamSecurityTokenSizeMin,
-    max: vaultGeneratedIamSecurityTokenSizeMax,
-    pattern: vaultGeneratedIamSecurityTokenPattern,
+    min: iamSecurityTokenSizeMin,
+    max: iamSecurityTokenSizeMax,
+    pattern: iamSecurityTokenPattern,
 };
 // PublicId is used as the canonicalID for a request that contains
 // no authentication information. Requestor can access
@@ -22,7 +22,6 @@ export const publicId = 'http://acs.amazonaws.com/groups/global/AllUsers';
 export const zenkoServiceAccount = 'http://acs.zenko.io/accounts/service';
 export const metadataFileNamespace = '/MDFile';
 export const dataFileURL = '/DataFile';
-export const passthroughFileURL = '/PassthroughFile';
 // AWS states max size for user-defined metadata
 // (x-amz-meta- headers) is 2 KB:
 // http://docs.aws.amazon.com/AmazonS3/latest/API/RESTObjectPUT.html
@@ -32,16 +31,7 @@ export const maximumMetaHeadersSize = 2136;
 export const emptyFileMd5 = 'd41d8cd98f00b204e9800998ecf8427e';
 // Version 2 changes the format of the data location property
 // Version 3 adds the dataStoreName attribute
-// Version 4 add the Creation-Time and Content-Language attributes,
-// and add support for x-ms-meta-* headers in UserMetadata
-// Version 5 adds the azureInfo structure
-// Version 6 adds a "deleted" flag that is updated to true before
-// the object gets deleted. This is done to keep object metadata in the
-// oplog when deleting the object, as oplog deletion events don't contain
-// any metadata of the object.
-// version 6 also adds the "isPHD" flag that is used to indicate that the master
-// object is a placeholder and is not up to date.
-export const mdModelVersion = 6;
+export const mdModelVersion = 3;
 /*
  * Splitter is used to build the object name for the overview of a
  * multipart upload and to build the object names for each part of a
@@ -81,45 +71,19 @@ export const mpuBucketPrefix = 'mpuShadowBucket';
 export const permittedCapitalizedBuckets = {
     METADATA: true,
 };
-// Setting a lower object key limit to account for:
-// - Mongo key limit of 1012 bytes
-// - Version ID in Mongo Key if versioned of 33
-// - Max bucket name length if bucket match false of 63
-// - Extra prefix slash for bucket prefix if bucket match of 1
-export const objectKeyByteLimit = 915;
-/* delimiter for location-constraint. The location constraint will be able
- * to include the ingestion flag
- */
-export const zenkoSeparator = ':';
 /* eslint-disable camelcase */
-export const externalBackends = { aws_s3: true, azure: true, gcp: true, pfs: true };
-export const replicationBackends = { aws_s3: true, azure: true, gcp: true };
-// hex digest of sha256 hash of empty string:
-export const emptyStringHash = crypto.createHash('sha256')
-    .update('', 'binary').digest('hex');
-export const mpuMDStoredExternallyBackend = { aws_s3: true, gcp: true };
+export const externalBackends = { aws_s3: true, azure: true, gcp: true, pfs: true }
+export const hasCopyPartBackends = { aws_s3: true, gcp: true }
+export const versioningNotImplBackends = { azure: true, gcp: true }
+export const mpuMDStoredExternallyBackend = { aws_s3: true, gcp: true }
 // AWS sets a minimum size limit for parts except for the last part.
 // http://docs.aws.amazon.com/AmazonS3/latest/API/mpUploadComplete.html
 export const minimumAllowedPartSize = 5242880;
-export const gcpMaximumAllowedPartCount = 1024;
-// GCP Object Tagging Prefix
-export const gcpTaggingPrefix = 'aws-tag-';
-export const productName = 'APN/1.0 Scality/1.0 Scality CloudServer for Zenko';
-export const legacyLocations = ['sproxyd', 'legacy'];
-// healthcheck default call from nginx is every 2 seconds
-// for external backends, don't call unless at least 1 minute
-// (60,000 milliseconds) since last call
-export const externalBackendHealthCheckInterval = 60000;
-// some of the available data backends (if called directly rather
-// than through the multiple backend gateway) need a key provided
-// as a string as first parameter of the get/delete methods.
-export const clientsRequireStringKey = { sproxyd: true, cdmi: true };
-export const hasCopyPartBackends = { aws_s3: true, gcp: true };
-export const versioningNotImplBackends = { azure: true, gcp: true };
-// user metadata applied on zenko-created objects
-export const zenkoIDHeader = 'x-amz-meta-zenko-instance-id';
+// hex digest of sha256 hash of empty string:
+export const emptyStringHash = crypto.createHash('sha256').update('', 'binary').digest('hex');
 // Default expiration value of the S3 pre-signed URL duration
 // 604800 seconds (seven days).
+export const legacyLocations = ['sproxyd', 'legacy'];
 export const defaultPreSignedURLExpiry = 7 * 24 * 60 * 60;
 // Regex for ISO-8601 formatted date
 export const shortIso8601Regex = /\d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2}Z/;
@@ -132,21 +96,16 @@ export const supportedNotificationEvents = new Set([
     's3:ObjectRemoved:*',
     's3:ObjectRemoved:Delete',
     's3:ObjectRemoved:DeleteMarkerCreated',
-    's3:Replication:OperationFailedReplication',
     's3:ObjectTagging:*',
     's3:ObjectTagging:Put',
     's3:ObjectTagging:Delete',
     's3:ObjectAcl:Put',
-    's3:ObjectRestore:*',
-    's3:ObjectRestore:Post',
-    's3:ObjectRestore:Completed',
-    's3:ObjectRestore:Delete',
-    's3:LifecycleTransition',
-    's3:LifecycleExpiration:*',
-    's3:LifecycleExpiration:DeleteMarkerCreated',
-    's3:LifecycleExpiration:Delete',
 ]);
 export const notificationArnPrefix = 'arn:scality:bucketnotif';
+// some of the available data backends (if called directly rather
+// than through the multiple backend gateway) need a key provided
+// as a string as first parameter of the get/delete methods.
+export const clientsRequireStringKey = { sproxyd: true, cdmi: true };
 // HTTP server keep-alive timeout is set to a higher value than
 // client's free sockets timeout to avoid the risk of triggering
 // ECONNRESET errors if the server closes the connection at the
@@ -163,15 +122,7 @@ export const supportedLifecycleRules = [
     'expiration',
     'noncurrentVersionExpiration',
     'abortIncompleteMultipartUpload',
-    'transitions',
-    'noncurrentVersionTransition',
 ];
 // Maximum number of buckets to cache (bucket metadata)
 export const maxCachedBuckets = process.env.METADATA_MAX_CACHED_BUCKETS ?
     Number(process.env.METADATA_MAX_CACHED_BUCKETS) : 1000;
-
-export const validRestoreObjectTiers = new Set(['Expedited', 'Standard', 'Bulk']);
-export const maxBatchingConcurrentOperations = 5;
-
-/** For policy resource arn check we allow empty account ID to not break compatibility */
-export const policyArnAllowedEmptyAccountId = ['utapi', 'scuba'];
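The left side's base-64 Vault token constants give way to a fixed-length lowercase-hex pattern. A quick check of what each side accepts (constants copied from the hunk, sample tokens invented):

```ts
const iamSecurityTokenSizeMin = 128;
const iamSecurityTokenSizeMax = 128;
const hotfixPattern = new RegExp(
    `^[a-f0-9]{${iamSecurityTokenSizeMin},${iamSecurityTokenSizeMax}}$`,
);
const developmentPattern = /^[A-Za-z0-9/+=]*$/;

const hexToken = 'ab'.repeat(64); // 128 lowercase hex characters
console.log(hotfixPattern.test(hexToken));      // true
console.log(developmentPattern.test(hexToken)); // true, base-64 is broader

const b64Token = 'QUJDREVG+/==';
console.log(hotfixPattern.test(b64Token));      // false: uppercase, '+', '='
console.log(developmentPattern.test(b64Token)); // true
```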
@@ -148,7 +148,7 @@ export class IndexTransaction {
             'missing condition for conditional put'
         );
     }
-    if (typeof condition.notExists !== 'string' && typeof condition.exists !== 'string') {
+    if (typeof condition.notExists !== 'string') {
         throw propError(
             'unsupportedConditionalOperation',
             'missing key or supported condition'
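The tightened check means an `exists` condition alone no longer passes validation on this branch. A self-contained paraphrase of the changed guard (not the class's full code; `propError` is replaced by a plain `Error` here):

```ts
type Condition = { notExists?: string; exists?: string };

// Right-hand side of the hunk: only `notExists` is accepted.
function validateConditionHotfix(condition: Condition): void {
    if (typeof condition.notExists !== 'string') {
        throw new Error(
            'unsupportedConditionalOperation: missing key or supported condition');
    }
}

validateConditionHotfix({ notExists: 'key1' }); // passes on both branches
// validateConditionHotfix({ exists: 'key1' }); // passes on development,
//                                              // throws on this branch
```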
@@ -690,11 +690,6 @@ export const ReportNotPresent: ErrorFormat = {
         'The request was rejected because the credential report does not exist. To generate a credential report, use GenerateCredentialReport.',
 };

-export const Found: ErrorFormat = {
-    code: 302,
-    description: 'Resource Found'
-};
-
 // ------------- Special non-AWS S3 errors -------------

 export const MPUinProgress: ErrorFormat = {
@@ -1042,15 +1037,3 @@ export const AuthMethodNotImplemented: ErrorFormat = {
     description: 'AuthMethodNotImplemented',
     code: 501,
 };
-
-// --------------------- quotaErros ---------------------
-
-export const NoSuchQuota: ErrorFormat = {
-    code: 404,
-    description: 'The specified resource does not have a quota.',
-};
-
-export const QuotaExceeded: ErrorFormat = {
-    code: 429,
-    description: 'The quota set for the resource is exceeded.',
-};
@@ -2,7 +2,7 @@ import type { ServerResponse } from 'http';
 import * as rawErrors from './arsenalErrors';

 /** All possible errors names. */
-export type Name = keyof typeof rawErrors;
+export type Name = keyof typeof rawErrors
 /** Object containing all errors names. It has the format { [Name]: "Name" } */
 export type Names = { [Name_ in Name]: Name_ };
 /** Mapping used to determine an error type. It has the format { [Name]: boolean } */
@@ -13,7 +13,7 @@ export type Errors = { [_ in Name]: ArsenalError };
 // This object is reused constantly through createIs, we store it there
 // to avoid recomputation.
 const isBase = Object.fromEntries(
-    Object.keys(rawErrors).map((key) => [key, false])
+    Object.keys(rawErrors).map(key => [key, false])
 ) as Is;

 // This allows to conditionally add the old behavior of errors to properly
@@ -32,7 +32,7 @@ export const allowUnsafeErrComp = (
 // the Proxy will return false.
 const createIs = (type: Name): Is => {
     const get = (is: Is, value: string | symbol) => is[value] ?? false;
-    const final = Object.freeze({ ...isBase, [type]: true });
+    const final = Object.freeze({ ...isBase, [type]: true })
     return new Proxy(final, { get });
 };

@@ -46,18 +46,13 @@ export class ArsenalError extends Error {
     /** Object used to determine the error type.
      * Example: error.is.InternalError */
     #is: Is;
-    /** A map of error metadata (can be extra fields
-     * that only show in debug mode) */
-    #metadata: Map<string, Object[]>;

-    private constructor(type: Name, code: number, description: string,
-        metadata?: Map<string, Object[]>) {
+    private constructor(type: Name, code: number, description: string) {
         super(type);
         this.#code = code;
         this.#description = description;
         this.#type = type;
         this.#is = createIs(type);
-        this.#metadata = metadata ?? new Map<string, Object[]>();

         // This restores the old behavior of errors, to make sure they're now
         // backward-compatible. Fortunately it's handled by TS, but it cannot
@@ -111,22 +106,7 @@ export class ArsenalError extends Error {
     customizeDescription(description: string): ArsenalError {
         const type = this.#type;
         const code = this.#code;
-        const metadata = new Map(this.#metadata);
-        const err = new ArsenalError(type, code, description, metadata);
-        err.stack = this.stack;
-        return err;
-    }
-
-    /** Clone the error with a new metadata field */
-    addMetadataEntry(key: string, value: Object[]): ArsenalError {
-        const type = this.#type;
-        const code = this.#code;
-        const description = this.#description;
-        const metadata = new Map(this.#metadata);
-        metadata.set(key, value);
-        const err = new ArsenalError(type, code, description, metadata);
-        err.stack = this.stack;
-        return err;
+        return new ArsenalError(type, code, description);
     }

     /** Used to determine the error type. Example: error.is.InternalError */
@@ -151,14 +131,9 @@ export class ArsenalError extends Error {
         return this.#type;
     }

-    /** A map of error metadata */
-    get metadata() {
-        return this.#metadata;
-    }
-
     /** Generate all possible errors. An instance is created by default. */
     static errors() {
-        const errors = {};
+        const errors = {}
         Object.entries(rawErrors).forEach((value) => {
             const name = value[0] as Name;
             const error = value[1];
@@ -166,7 +141,7 @@ export class ArsenalError extends Error {
             const get = () => new ArsenalError(name, code, description);
             Object.defineProperty(errors, name, { get });
         });
-        return errors as Errors;
+        return errors as Errors
     }
 }
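In practice the hunk removes the metadata-carrying constructor path: `customizeDescription()` now returns a fresh instance, and the `addMetadataEntry()`/`metadata` pair disappears. A short sketch of what still works on both branches, based only on the code shown above (import path illustrative):

```ts
import errors from './errors'; // illustrative path

const err = errors.InvalidArgument.customizeDescription('invalid proxy_path header');
console.log(err.is.InvalidArgument); // true: the type is preserved
console.log(err.code);               // 400

// development branch only (removed by this hunk):
// const tagged = err.addMetadataEntry('reqIds', [{ id: 'abc' }]);
// tagged.metadata.get('reqIds');
```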
@@ -7,8 +7,8 @@
         "test": "mocha --recursive --timeout 5500 tests/unit"
     },
     "dependencies": {
-        "mocha": "5.2.0",
-        "async": "~2.6.1",
+        "mocha": "2.5.3",
+        "async": "^2.6.0",
         "node-forge": "^0.7.1"
     }
 }
@@ -20,32 +20,7 @@ export default class RedisClient {
         return this;
     }
 
-    /**
-     * scan a pattern and return matching keys
-     * @param pattern - string pattern to match with all existing keys
-     * @param [count=10] - scan count
-     * @param cb - callback (error, result)
-     */
-    scan(pattern: string, count = 10, cb: Callback) {
-        const params = { match: pattern, count };
-        const keys: any[] = [];
-
-        const stream = this._client.scanStream(params);
-        stream.on('data', resultKeys => {
-            for (let i = 0; i < resultKeys.length; i++) {
-                keys.push(resultKeys[i]);
-            }
-        });
-        stream.on('end', () => {
-            cb(null, keys);
-        });
-    }
-
-    /** increment value of a key by 1 and set a ttl
-     * @param key - key holding the value
-     * @param expiry - expiry in seconds
-     * @param cb - callback
-     */
+    /** increment value of a key by 1 and set a ttl */
     incrEx(key: string, expiry: number, cb: Callback) {
         const exp = expiry.toString();
         return this._client
@@ -53,22 +28,7 @@ export default class RedisClient {
             .exec(cb);
     }
 
-    /**
-     * increment value of a key by a given amount
-     * @param key - key holding the value
-     * @param amount - amount to increase by
-     * @param cb - callback
-     */
-    incrby(key: string, amount: number, cb: Callback) {
-        return this._client.incrby(key, amount, cb);
-    }
-
-    /** increment value of a key by a given amount and set a ttl
-     * @param key - key holding the value
-     * @param amount - amount to increase by
-     * @param expiry - expiry in seconds
-     * @param cb - callback
-     */
+    /** increment value of a key by a given amount and set a ttl */
     incrbyEx(key: string, amount: number, expiry: number, cb: Callback) {
         const am = amount.toString();
         const exp = expiry.toString();
@@ -77,29 +37,13 @@ export default class RedisClient {
             .exec(cb);
     }
 
-    /**
-     * decrement value of a key by a given amount
-     * @param key - key holding the value
-     * @param amount - amount to increase by
-     * @param cb - callback
-     */
-    decrby(key: string, amount: number, cb: Callback) {
-        return this._client.decrby(key, amount, cb);
-    }
-
-    /**
-     * execute a batch of commands
-     * @param cmds - list of commands
-     * @param cb - callback
-     * @return
-     */
+    /** execute a batch of commands */
     batch(cmds: string[][], cb: Callback) {
         return this._client.pipeline(cmds).exec(cb);
     }
 
     /**
      * Checks if a key exists
-     * @param key - name of key
      * @param cb - callback
      * If cb response returns 0, key does not exist.
      * If cb response returns 1, key exists.
@@ -108,22 +52,10 @@ export default class RedisClient {
         return this._client.exists(key, cb);
     }
 
-    /**
-     * get value stored at key
-     * @param key - key holding the value
-     * @param cb - callback
-     */
-    get(key: string, cb: Callback) {
-        return this._client.get(key, cb);
-    }
-
     /**
      * Add a value and its score to a sorted set. If no sorted set exists, this
      * will create a new one for the given key.
-     * @param key - name of key
      * @param score - score used to order set
-     * @param value - value to store
-     * @param cb - callback
      */
     zadd(key: string, score: number, value: string, cb: Callback) {
         return this._client.zadd(key, score, value, cb);
@@ -134,8 +66,6 @@ export default class RedisClient {
      * Note: using this on a key that does not exist will return 0.
      * Note: using this on an existing key that isn't a sorted set will
      * return an error WRONGTYPE.
-     * @param key - name of key
-     * @param cb - callback
      */
     zcard(key: string, cb: Callback) {
         return this._client.zcard(key, cb);
@@ -146,9 +76,6 @@ export default class RedisClient {
      * Note: using this on a key that does not exist will return nil.
      * Note: using this on a value that does not exist in a valid sorted set key
      * will return nil.
-     * @param key - name of key
-     * @param value - value within sorted set
-     * @param cb - callback
      */
     zscore(key: string, value: string, cb: Callback) {
         return this._client.zscore(key, value, cb);
@@ -156,10 +83,8 @@ export default class RedisClient {
 
     /**
      * Remove a value from a sorted set
-     * @param key - name of key
-     * @param value - value within sorted set. Can specify
-     * multiple values within an array
-     * @param cb - callback
+     * @param value - value within sorted set. Can specify multiple values within an array
+     * @param {function} cb - callback
      * The cb response returns number of values removed
      */
     zrem(key: string, value: string | string[], cb: Callback) {
@@ -168,10 +93,8 @@ export default class RedisClient {
 
     /**
      * Get specified range of elements in a sorted set
-     * @param key - name of key
      * @param start - start index (inclusive)
      * @param end - end index (inclusive) (can use -1)
-     * @param cb - callback
      */
     zrange(key: string, start: number, end: number, cb: Callback) {
         return this._client.zrange(key, start, end, cb);
@@ -179,12 +102,10 @@ export default class RedisClient {
 
     /**
      * Get range of elements in a sorted set based off score
-     * @param key - name of key
      * @param min - min score value (inclusive)
      * (can use "-inf")
     * @param max - max score value (inclusive)
      * (can use "+inf")
-     * @param cb - callback
      */
     zrangebyscore(
         key: string,
@@ -195,15 +116,6 @@ export default class RedisClient {
         return this._client.zrangebyscore(key, min, max, cb);
     }
 
-    /**
-     * get TTL or expiration in seconds
-     * @param key - name of key
-     * @param cb - callback
-     */
-    ttl(key: string, cb: Callback) {
-        return this._client.ttl(key, cb);
-    }
-
     clear(cb: Callback) {
         return this._client.flushdb(cb);
     }
@@ -211,8 +123,4 @@ export default class RedisClient {
     disconnect() {
         this._client.disconnect();
     }
-
-    listClients(cb: Callback) {
-        return this._client.client('list', cb);
-    }
 }

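The surviving `incrEx` pipelines INCR and EXPIRE so a counter and its TTL are set in one round trip. A hedged usage sketch against raw ioredis (the key name and TTL are made up; the `RedisClient` wrapper above drives exactly this `multi([...]).exec(cb)` shape):

```ts
import Redis from 'ioredis';

const client = new Redis({ host: '127.0.0.1', port: 6379 });

// Equivalent of incrEx('requests:counter', 300, cb): increment by 1 and
// refresh a 300-second TTL in a single round trip.
client
    .multi([['incr', 'requests:counter'], ['expire', 'requests:counter', '300']])
    .exec((err, results) => {
        // results is an array of [error, value] pairs, one per queued command.
        console.log(err, results);
    });
```
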
@@ -2,8 +2,6 @@ import async from 'async';
 import RedisClient from './RedisClient';
 import { Logger } from 'werelogs';
 
-export type Callback = (error: Error | null, value?: any) => void;
-
 export default class StatsClient {
     _redis: RedisClient;
     _interval: number;
@@ -50,7 +48,7 @@ export default class StatsClient {
      * @param d - Date instance
      * @return key - key for redis
      */
-    buildKey(name: string, d: Date): string {
+    _buildKey(name: string, d: Date): string {
         return `${name}:${this._normalizeTimestamp(d)}`;
     }
 
@@ -93,33 +91,11 @@ export default class StatsClient {
             amount = (typeof incr === 'number') ? incr : 1;
         }
 
-        const key = this.buildKey(`${id}:requests`, new Date());
+        const key = this._buildKey(`${id}:requests`, new Date());
 
         return this._redis.incrbyEx(key, amount, this._expiry, callback);
     }
 
-    /**
-     * Increment the given key by the given value.
-     * @param key - The Redis key to increment
-     * @param incr - The value to increment by
-     * @param [cb] - callback
-     */
-    incrementKey(key: string, incr: number, cb: Callback) {
-        const callback = cb || this._noop;
-        return this._redis.incrby(key, incr, callback);
-    }
-
-    /**
-     * Decrement the given key by the given value.
-     * @param key - The Redis key to decrement
-     * @param decr - The value to decrement by
-     * @param [cb] - callback
-     */
-    decrementKey(key: string, decr: number, cb: Callback) {
-        const callback = cb || this._noop;
-        return this._redis.decrby(key, decr, callback);
-    }
-
     /**
      * report/record a request that ended up being a 500 on the server
      * @param id - service identifier
@@ -129,53 +105,10 @@ export default class StatsClient {
             return undefined;
         }
         const callback = cb || this._noop;
-        const key = this.buildKey(`${id}:500s`, new Date());
+        const key = this._buildKey(`${id}:500s`, new Date());
         return this._redis.incrEx(key, this._expiry, callback);
     }
 
-    /**
-     * wrapper on `getStats` that handles a list of keys
-     * @param log - Werelogs request logger
-     * @param ids - service identifiers
-     * @param cb - callback to call with the err/result
-     */
-    getAllStats(log: Logger, ids: string[], cb: Callback) {
-        if (!this._redis) {
-            return cb(null, {});
-        }
-
-        const statsRes = {
-            'requests': 0,
-            '500s': 0,
-            'sampleDuration': this._expiry,
-        };
-        let requests = 0;
-        let errors = 0;
-
-        // for now set concurrency to default of 10
-        return async.eachLimit(ids, 10, (id: string, done) => {
-            this.getStats(log, id, (err, res) => {
-                if (err) {
-                    return done(err);
-                }
-                requests += res.requests;
-                errors += res['500s'];
-                return done();
-            });
-        }, error => {
-            if (error) {
-                log.error('error getting stats', {
-                    error,
-                    method: 'StatsClient.getAllStats',
-                });
-                return cb(null, statsRes);
-            }
-            statsRes.requests = requests;
-            statsRes['500s'] = errors;
-            return cb(null, statsRes);
-        });
-    }
-
     /**
      * get stats for the last x seconds, x being the sampling duration
      * @param log - Werelogs request logger
@@ -190,8 +123,8 @@ export default class StatsClient {
         const reqsKeys: ['get', string][] = [];
         const req500sKeys: ['get', string][] = [];
         for (let i = 0; i < totalKeys; i++) {
-            reqsKeys.push(['get', this.buildKey(`${id}:requests`, d)]);
-            req500sKeys.push(['get', this.buildKey(`${id}:500s`, d)]);
+            reqsKeys.push(['get', this._buildKey(`${id}:requests`, d)]);
+            req500sKeys.push(['get', this._buildKey(`${id}:500s`, d)]);
             this._setPrevInterval(d);
         }
         return async.parallel([

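The `_buildKey` rename above keeps the interval-keyed counter scheme intact: timestamps are floored to the sampling interval, so every request in the same window increments the same Redis key. A standalone sketch of that normalization (the 300-second interval is an assumption for illustration):

```ts
const INTERVAL = 300; // seconds; StatsClient takes this from its constructor

function normalizeTimestamp(d: Date, intervalSecs: number): number {
    const m = d.getMinutes();
    // Floor minutes to the interval boundary and zero out seconds/millis.
    return d.setMinutes(m - (m % Math.floor(intervalSecs / 60)), 0, 0);
}

function buildKey(name: string, d: Date): string {
    return `${name}:${normalizeTimestamp(d, INTERVAL)}`;
}

// Two requests within the same 5-minute window map to the same key:
console.log(buildKey('s3:requests', new Date('2024-01-01T10:02:00')));
console.log(buildKey('s3:requests', new Date('2024-01-01T10:04:59')));
```
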
@@ -1,8 +1,4 @@
 import StatsClient from './StatsClient';
-import { Logger } from 'werelogs';
-import async from 'async';
-
-export type Callback = (error: Error | null, value?: any) => void;
 
 /**
  * @class StatsModel
@@ -11,145 +7,12 @@ export type Callback = (error: Error | null, value?: any) => void;
  * rather than by seconds
  */
 export default class StatsModel extends StatsClient {
-    /**
-     * Utility method to convert 2d array rows to columns, and vice versa
-     * See also: https://docs.ruby-lang.org/en/2.0.0/Array.html#method-i-zip
-     * @param arrays - 2d array of integers
-     * @return converted array
-     */
-    _zip(arrays: number[][]) {
-        if (arrays.length > 0 && arrays.every(a => Array.isArray(a))) {
-            return arrays[0].map((_, i) => arrays.map(a => a[i]));
-        }
-        return [];
-    }
-
-    /**
-     * normalize to the nearest interval
-     * @param d - Date instance
-     * @return timestamp - normalized to the nearest interval
-     */
-    _normalizeTimestamp(d: Date) {
-        const m = d.getMinutes();
-        return d.setMinutes(m - m % (Math.floor(this._interval / 60)), 0, 0);
-    }
-
-    /**
-     * override the method to get the count as an array of integers separated
-     * by each interval
-     * typical input looks like [[null, '1'], [null, '2'], [null, null]...]
-     * @param arr - each index contains the result of each batch command
-     * where index 0 signifies the error and index 1 contains the result
-     * @return array of integers, ordered from most recent interval to
-     * oldest interval with length of (expiry / interval)
-     */
-    // @ts-expect-errors
-    _getCount(arr: [any, string | null][]): number[] {
-        const size = Math.floor(this._expiry / this._interval);
-        const array = arr.reduce((store, i) => {
-            let num = parseInt(i[1] ?? '', 10);
-            num = Number.isNaN(num) ? 0 : num;
-            store.push(num);
-            return store;
-        }, [] as number[]);
-
-        if (array.length < size) {
-            array.push(...Array(size - array.length).fill(0));
-        }
-        return array;
-    }
-
-    /**
-     * wrapper on `getStats` that handles a list of keys
-     * override the method to reduce the returned 2d array from `_getCount`
-     * @param log - Werelogs request logger
-     * @param ids - service identifiers
-     * @param cb - callback to call with the err/result
-     */
-    getAllStats(log: Logger, ids: string[], cb: Callback) {
-        if (!this._redis) {
-            return cb(null, {});
-        }
-
-        const size = Math.floor(this._expiry / this._interval);
-        const statsRes = {
-            'requests': Array(size).fill(0),
-            '500s': Array(size).fill(0),
-            'sampleDuration': this._expiry,
-        };
-        const requests: any[] = [];
-        const errors: any[] = [];
-
-        if (ids.length === 0) {
-            return cb(null, statsRes);
-        }
-
-        // for now set concurrency to default of 10
-        return async.eachLimit(ids, 10, (id, done) => {
-            this.getStats(log, id, (err, res) => {
-                if (err) {
-                    return done(err);
-                }
-                requests.push(res.requests);
-                errors.push(res['500s']);
-                return done();
-            });
-        }, error => {
-            if (error) {
-                log.error('error getting stats', {
-                    error,
-                    method: 'StatsModel.getAllStats',
-                });
-                return cb(null, statsRes);
-            }
-
-            statsRes.requests = this._zip(requests).map(arr =>
-                arr.reduce((acc, i) => acc + i), 0);
-            statsRes['500s'] = this._zip(errors).map(arr =>
-                arr.reduce((acc, i) => acc + i), 0);
-
-            return cb(null, statsRes);
-        });
-    }
-
-    /**
-     * Handles getting a list of global keys.
-     * @param ids - Service identifiers
-     * @param log - Werelogs request logger
-     * @param cb - Callback
-     */
-    getAllGlobalStats(ids: string[], log: Logger, cb: Callback) {
-        const reqsKeys = ids.map(key => (['get', key]));
-        return this._redis.batch(reqsKeys, (err, res) => {
-            const statsRes = { requests: 0 };
-            if (err) {
-                log.error('error getting metrics', {
-                    error: err,
-                    method: 'StatsClient.getAllGlobalStats',
-                });
-                return cb(null, statsRes);
-            }
-            statsRes.requests = res.reduce((sum, curr) => {
-                const [cmdErr, val] = curr;
-                if (cmdErr) {
-                    // Log any individual request errors from the batch request.
-                    log.error('error getting metrics', {
-                        error: cmdErr,
-                        method: 'StatsClient.getAllGlobalStats',
-                    });
-                }
-                return sum + (Number.parseInt(val, 10) || 0);
-            }, 0);
-            return cb(null, statsRes);
-        });
-    }
-
     /**
      * normalize date timestamp to the nearest hour
      * @param d - Date instance
      * @return timestamp - normalized to the nearest hour
      */
-    normalizeTimestampByHour(d: Date) {
+    normalizeTimestampByHour(d: Date): number {
         return d.setMinutes(0, 0, 0);
     }
 
@@ -158,10 +21,40 @@ export default class StatsModel extends StatsClient {
      * @param d - Date instance
      * @return timestamp - one hour prior to date passed
      */
-    _getDatePreviousHour(d: Date) {
+    _getDatePreviousHour(d: Date): number {
         return d.setHours(d.getHours() - 1);
     }
 
+    /**
+     * normalize to the nearest interval
+     * @param d - Date instance
+     * @return timestamp - normalized to the nearest interval
+     */
+    _normalizeTimestamp(d: Date): number {
+        const m = d.getMinutes();
+        return d.setMinutes(m - m % (Math.floor(this._interval / 60)), 0, 0);
+    }
+
+    /**
+     * override the method to get the result as an array of integers separated
+     * by each interval
+     * typical input looks like [[null, '1'], [null, '2'], [null, null]...]
+     * @param arr - each index contains the result of each batch command
+     * where index 0 signifies the error and index 1 contains the result
+     * @return array of integers, ordered from most recent interval to
+     * oldest interval
+     */
+    // @ts-ignore
+    // TODO change name or conform to parent class method
+    _getCount(arr: [any, string | null][]) {
+        return arr.reduce<number[]>((store, i) => {
+            let num = parseInt(i[1] ?? '', 10);
+            num = Number.isNaN(num) ? 0 : num;
+            store.push(num);
+            return store;
+        }, []);
+    }
+
     /**
      * get list of sorted set key timestamps
      * @param epoch - epoch time

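The re-added `_getCount` consumes the `[error, value]` pairs that a Redis pipeline returns, coercing nulls (expired keys) and non-numeric replies to 0. A self-contained sketch of the same reduction outside the class:

```ts
function getCount(arr: [any, string | null][]): number[] {
    return arr.reduce<number[]>((store, entry) => {
        let num = parseInt(entry[1] ?? '', 10);
        num = Number.isNaN(num) ? 0 : num;
        store.push(num);
        return store;
    }, []);
}

// Typical pipeline-of-GETs input: keys that expired come back as null.
console.log(getCount([[null, '1'], [null, '2'], [null, null]])); // [1, 2, 0]
```
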
@@ -1,5 +1,3 @@
-import { RequestLogger } from 'werelogs';
-
 import { legacyLocations } from '../constants';
 import escapeForXml from '../s3middleware/escapeForXml';
 

@@ -1,281 +0,0 @@
-export type DeleteRetentionPolicy = {
-    enabled: boolean;
-    days: number;
-};
-
-/**
- * Helper class to ease access to the Azure specific information for
- * storage accounts mapped to buckets.
- */
-export default class BucketAzureInfo {
-    _data: {
-        sku: string;
-        accessTier: string;
-        kind: string;
-        systemKeys: string[];
-        tenantKeys: string[];
-        subscriptionId: string;
-        resourceGroup: string;
-        deleteRetentionPolicy: DeleteRetentionPolicy;
-        managementPolicies: any[];
-        httpsOnly: boolean;
-        tags: any;
-        networkACL: any[];
-        cname: string;
-        azureFilesAADIntegration: boolean;
-        hnsEnabled: boolean;
-        logging: any;
-        hourMetrics: any;
-        minuteMetrics: any;
-        serviceVersion: string;
-    }
-    /**
-     * @constructor
-     * @param obj - Raw structure for the Azure info on storage account
-     * @param obj.sku - SKU name of this storage account
-     * @param obj.accessTier - Access Tier name of this storage account
-     * @param obj.kind - Kind name of this storage account
-     * @param obj.systemKeys - pair of shared keys for the system
-     * @param obj.tenantKeys - pair of shared keys for the tenant
-     * @param obj.subscriptionId - subscription ID the storage account
-     * belongs to
-     * @param obj.resourceGroup - Resource group name the storage
-     * account belongs to
-     * @param obj.deleteRetentionPolicy - Delete retention policy
-     * @param obj.deleteRetentionPolicy.enabled -
-     * @param obj.deleteRetentionPolicy.days -
-     * @param obj.managementPolicies - Management policies for this
-     * storage account
-     * @param obj.httpsOnly - Server the content of this storage
-     * account through HTTPS only
-     * @param obj.tags - Set of tags applied on this storage account
-     * @param obj.networkACL - Network ACL of this storage account
-     * @param obj.cname - CNAME of this storage account
-     * @param obj.azureFilesAADIntegration - whether or not Azure
-     * Files AAD Integration is enabled for this storage account
-     * @param obj.hnsEnabled - whether or not a hierarchical namespace
-     * is enabled for this storage account
-     * @param obj.logging - service properties: logging
-     * @param obj.hourMetrics - service properties: hourMetrics
-     * @param obj.minuteMetrics - service properties: minuteMetrics
-     * @param obj.serviceVersion - service properties: serviceVersion
-     */
-    constructor(obj: {
-        sku: string;
-        accessTier: string;
-        kind: string;
-        systemKeys: string[];
-        tenantKeys: string[];
-        subscriptionId: string;
-        resourceGroup: string;
-        deleteRetentionPolicy: DeleteRetentionPolicy;
-        managementPolicies: any[];
-        httpsOnly: boolean;
-        tags: any;
-        networkACL: any[];
-        cname: string;
-        azureFilesAADIntegration: boolean;
-        hnsEnabled: boolean;
-        logging: any;
-        hourMetrics: any;
-        minuteMetrics: any;
-        serviceVersion: string;
-    }) {
-        this._data = {
-            sku: obj.sku,
-            accessTier: obj.accessTier,
-            kind: obj.kind,
-            systemKeys: obj.systemKeys,
-            tenantKeys: obj.tenantKeys,
-            subscriptionId: obj.subscriptionId,
-            resourceGroup: obj.resourceGroup,
-            deleteRetentionPolicy: obj.deleteRetentionPolicy,
-            managementPolicies: obj.managementPolicies,
-            httpsOnly: obj.httpsOnly,
-            tags: obj.tags,
-            networkACL: obj.networkACL,
-            cname: obj.cname,
-            azureFilesAADIntegration: obj.azureFilesAADIntegration,
-            hnsEnabled: obj.hnsEnabled,
-            logging: obj.logging,
-            hourMetrics: obj.hourMetrics,
-            minuteMetrics: obj.minuteMetrics,
-            serviceVersion: obj.serviceVersion,
-        };
-    }
-
-    getSku() {
-        return this._data.sku;
-    }
-
-    setSku(sku: string) {
-        this._data.sku = sku;
-        return this;
-    }
-
-    getAccessTier() {
-        return this._data.accessTier;
-    }
-
-    setAccessTier(accessTier: string) {
-        this._data.accessTier = accessTier;
-        return this;
-    }
-
-    getKind() {
-        return this._data.kind;
-    }
-
-    setKind(kind: string) {
-        this._data.kind = kind;
-        return this;
-    }
-
-    getSystemKeys() {
-        return this._data.systemKeys;
-    }
-
-    setSystemKeys(systemKeys: string[]) {
-        this._data.systemKeys = systemKeys;
-        return this;
-    }
-
-    getTenantKeys() {
-        return this._data.tenantKeys;
-    }
-
-    setTenantKeys(tenantKeys: string[]) {
-        this._data.tenantKeys = tenantKeys;
-        return this;
-    }
-
-    getSubscriptionId() {
-        return this._data.subscriptionId;
-    }
-
-    setSubscriptionId(subscriptionId: string) {
-        this._data.subscriptionId = subscriptionId;
-        return this;
-    }
-
-    getResourceGroup() {
-        return this._data.resourceGroup;
-    }
-
-    setResourceGroup(resourceGroup: string) {
-        this._data.resourceGroup = resourceGroup;
-        return this;
-    }
-
-    getDeleteRetentionPolicy() {
-        return this._data.deleteRetentionPolicy;
-    }
-
-    setDeleteRetentionPolicy(deleteRetentionPolicy: DeleteRetentionPolicy) {
-        this._data.deleteRetentionPolicy = deleteRetentionPolicy;
-        return this;
-    }
-
-    getManagementPolicies() {
-        return this._data.managementPolicies;
-    }
-
-    setManagementPolicies(managementPolicies: any[]) {
-        this._data.managementPolicies = managementPolicies;
-        return this;
-    }
-
-    getHttpsOnly() {
-        return this._data.httpsOnly;
-    }
-
-    setHttpsOnly(httpsOnly: boolean) {
-        this._data.httpsOnly = httpsOnly;
-        return this;
-    }
-
-    getTags() {
-        return this._data.tags;
-    }
-
-    setTags(tags: any) {
-        this._data.tags = tags;
-        return this;
-    }
-
-    getNetworkACL() {
-        return this._data.networkACL;
-    }
-
-    setNetworkACL(networkACL: any[]) {
-        this._data.networkACL = networkACL;
-        return this;
-    }
-
-    getCname() {
-        return this._data.cname;
-    }
-
-    setCname(cname: string) {
-        this._data.cname = cname;
-        return this;
-    }
-
-    getAzureFilesAADIntegration() {
-        return this._data.azureFilesAADIntegration;
-    }
-
-    setAzureFilesAADIntegration(azureFilesAADIntegration: boolean) {
-        this._data.azureFilesAADIntegration = azureFilesAADIntegration;
-        return this;
-    }
-
-    getHnsEnabled() {
-        return this._data.hnsEnabled;
-    }
-
-    setHnsEnabled(hnsEnabled: boolean) {
-        this._data.hnsEnabled = hnsEnabled;
-        return this;
-    }
-
-    getLogging() {
-        return this._data.logging;
-    }
-
-    setLogging(logging: any) {
-        this._data.logging = logging;
-        return this;
-    }
-
-    getHourMetrics() {
-        return this._data.hourMetrics;
-    }
-
-    setHourMetrics(hourMetrics: any) {
-        this._data.hourMetrics = hourMetrics;
-        return this;
-    }
-
-    getMinuteMetrics() {
-        return this._data.minuteMetrics;
-    }
-
-    setMinuteMetrics(minuteMetrics: any) {
-        this._data.minuteMetrics = minuteMetrics;
-        return this;
-    }
-
-    getServiceVersion() {
-        return this._data.serviceVersion;
-    }
-
-    setServiceVersion(serviceVersion: any) {
-        this._data.serviceVersion = serviceVersion;
-        return this;
-    }
-
-    getValue() {
-        return this._data;
-    }
-}

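The deleted class was a plain data holder built on a fluent accessor pattern: every setter returns `this` so updates chain. A minimal sketch of that pattern with a made-up two-field payload (not the real Azure structure):

```ts
class AccountInfo {
    private _data: { sku: string; kind: string };

    constructor(obj: { sku: string; kind: string }) {
        this._data = { sku: obj.sku, kind: obj.kind };
    }

    getSku() {
        return this._data.sku;
    }

    setSku(sku: string) {
        this._data.sku = sku;
        return this; // fluent: enables chained updates
    }

    getValue() {
        return this._data;
    }
}

const info = new AccountInfo({ sku: 'Standard_LRS', kind: 'StorageV2' })
    .setSku('Premium_LRS');
console.log(info.getValue()); // { sku: 'Premium_LRS', kind: 'StorageV2' }
```
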
@@ -8,12 +8,10 @@ import ObjectLockConfiguration from './ObjectLockConfiguration';
 import BucketPolicy from './BucketPolicy';
 import NotificationConfiguration from './NotificationConfiguration';
 import { ACL as OACL } from './ObjectMD';
-import { areTagsValid, BucketTag } from '../s3middleware/tagging';
 
 // WHEN UPDATING THIS NUMBER, UPDATE BucketInfoModelVersion.md CHANGELOG
-// BucketInfoModelVersion.md can be found in documentation/ at the root
-// of this repository
-const modelVersion = 16;
+// BucketInfoModelVersion.md can be found in the root of this repository
+const modelVersion = 10;
 
 export type CORS = {
     id: string;
@@ -37,41 +35,6 @@ export type VersioningConfiguration = {
     MfaDelete: any;
 };
 
-export type VeeamSOSApi = {
-    SystemInfo?: {
-        ProtocolVersion: string,
-        ModelName: string,
-        ProtocolCapabilities: {
-            CapacityInfo: boolean,
-            UploadSessions: boolean,
-            IAMSTS?: boolean,
-        },
-        APIEndpoints?: {
-            IAMEndpoint: string,
-            STSEndpoint: string,
-        },
-        SystemRecommendations?: {
-            S3ConcurrentTaskLimit: number,
-            S3MultiObjectDelete: number,
-            StorageCurrentTasksLimit: number,
-            KbBlockSize: number,
-        }
-        LastModified?: string,
-    },
-    CapacityInfo?: {
-        Capacity: number,
-        Available: number,
-        Used: number,
-        LastModified?: string,
-    },
-};
-
-// Capabilities contains all specifics from external products supported by
-// our S3 implementation, at bucket level
-export type Capabilities = {
-    VeeamSOSApi?: VeeamSOSApi,
-};
-
 export type ACL = OACL & { WRITE: string[] }
 
 export default class BucketInfo {
@@ -95,70 +58,56 @@ export default class BucketInfo {
     _objectLockEnabled?: boolean;
     _objectLockConfiguration?: any;
     _notificationConfiguration?: any;
-    _tags?: Array<BucketTag>;
-    _readLocationConstraint: string | null;
-    _isNFS: boolean | null;
-    _azureInfo: any | null;
-    _ingestion: { status: 'enabled' | 'disabled' } | null;
-    _capabilities?: Capabilities;
-    _quotaMax: number | 0;
+    _tags?: { key: string; value: string }[] | null;
 
     /**
      * Represents all bucket information.
      * @constructor
-     * @param name - bucket name
-     * @param owner - bucket owner's name
-     * @param ownerDisplayName - owner's display name
-     * @param creationDate - creation date of bucket
-     * @param mdBucketModelVersion - bucket model version
-     * @param [acl] - bucket ACLs (no need to copy
+     * @param {string} name - bucket name
+     * @param {string} owner - bucket owner's name
+     * @param {string} ownerDisplayName - owner's display name
+     * @param {object} creationDate - creation date of bucket
+     * @param {number} mdBucketModelVersion - bucket model version
+     * @param {object} [acl] - bucket ACLs (no need to copy
      * ACL object since referenced object will not be used outside of
      * BucketInfo instance)
-     * @param transient - flag indicating whether bucket is transient
-     * @param deleted - flag indicating whether attempt to delete
-     * @param serverSideEncryption - sse information for this bucket
-     * @param serverSideEncryption.cryptoScheme -
+     * @param {boolean} transient - flag indicating whether bucket is transient
+     * @param {boolean} deleted - flag indicating whether attempt to delete
+     * @param {object} serverSideEncryption - sse information for this bucket
+     * @param {number} serverSideEncryption.cryptoScheme -
      * cryptoScheme used
-     * @param serverSideEncryption.algorithm -
+     * @param {string} serverSideEncryption.algorithm -
      * algorithm to use
-     * @param serverSideEncryption.masterKeyId -
+     * @param {string} serverSideEncryption.masterKeyId -
      * key to get master key
-     * @param serverSideEncryption.configuredMasterKeyId -
+     * @param {string} serverSideEncryption.configuredMasterKeyId -
      * custom KMS key id specified by user
-     * @param serverSideEncryption.mandatory -
+     * @param {boolean} serverSideEncryption.mandatory -
      * true for mandatory encryption
      * bucket has been made
-     * @param versioningConfiguration - versioning configuration
-     * @param versioningConfiguration.Status - versioning status
-     * @param versioningConfiguration.MfaDelete - versioning mfa delete
-     * @param locationConstraint - locationConstraint for bucket that
-     * also includes the ingestion flag
-     * @param [websiteConfiguration] - website
+     * @param {object} versioningConfiguration - versioning configuration
+     * @param {string} versioningConfiguration.Status - versioning status
+     * @param {object} versioningConfiguration.MfaDelete - versioning mfa delete
+     * @param {string} locationConstraint - locationConstraint for bucket
+     * @param {WebsiteConfiguration} [websiteConfiguration] - website
     * configuration
-     * @param [cors] - collection of CORS rules to apply
-     * @param [cors[].id] - optional ID to identify rule
-     * @param cors[].allowedMethods - methods allowed for CORS request
-     * @param cors[].allowedOrigins - origins allowed for CORS request
-     * @param [cors[].allowedHeaders] - headers allowed in an OPTIONS
+     * @param {object[]} [cors] - collection of CORS rules to apply
+     * @param {string} [cors[].id] - optional ID to identify rule
+     * @param {string[]} cors[].allowedMethods - methods allowed for CORS request
+     * @param {string[]} cors[].allowedOrigins - origins allowed for CORS request
+     * @param {string[]} [cors[].allowedHeaders] - headers allowed in an OPTIONS
      * request via the Access-Control-Request-Headers header
-     * @param [cors[].maxAgeSeconds] - seconds browsers should cache
+     * @param {number} [cors[].maxAgeSeconds] - seconds browsers should cache
      * OPTIONS response
-     * @param [cors[].exposeHeaders] - headers expose to applications
-     * @param [replicationConfiguration] - replication configuration
-     * @param [lifecycleConfiguration] - lifecycle configuration
-     * @param [bucketPolicy] - bucket policy
-     * @param [uid] - unique identifier for the bucket, necessary
-     * @param readLocationConstraint - readLocationConstraint for bucket
-     * addition for use with lifecycle operations
-     * @param [isNFS] - whether the bucket is on NFS
-     * @param [ingestionConfig] - object for ingestion status: en/dis
-     * @param [azureInfo] - Azure storage account specific info
-     * @param [objectLockEnabled] - true when object lock enabled
-     * @param [objectLockConfiguration] - object lock configuration
-     * @param [notificationConfiguration] - bucket notification configuration
-     * @param [tags] - bucket tag set
-     * @param [capabilities] - capabilities for the bucket
-     * @param quotaMax - bucket quota
+     * @param {string[]} [cors[].exposeHeaders] - headers expose to applications
+     * @param {object} [replicationConfiguration] - replication configuration
+     * @param {object} [lifecycleConfiguration] - lifecycle configuration
+     * @param {object} [bucketPolicy] - bucket policy
+     * @param {string} [uid] - unique identifier for the bucket, necessary
+     * @param {boolean} [objectLockEnabled] - true when object lock enabled
+     * @param {object} [objectLockConfiguration] - object lock configuration
+     * @param {object} [notificationConfiguration] - bucket notification configuration
+     * @param {object[]} [tags] - bucket tags
      */
     constructor(
         name: string,
@@ -178,16 +127,10 @@ export default class BucketInfo {
         lifecycleConfiguration?: any,
         bucketPolicy?: any,
         uid?: string,
-        readLocationConstraint?: string,
-        isNFS?: boolean,
-        ingestionConfig?: { status: 'enabled' | 'disabled' },
-        azureInfo?: any,
         objectLockEnabled?: boolean,
         objectLockConfiguration?: any,
         notificationConfiguration?: any,
-        tags?: Array<BucketTag> | [],
-        capabilities?: Capabilities,
-        quotaMax?: number | 0,
+        tags?: { key: string; value: string }[],
     ) {
         assert.strictEqual(typeof name, 'string');
         assert.strictEqual(typeof owner, 'string');
@@ -229,15 +172,6 @@ export default class BucketInfo {
         if (locationConstraint) {
             assert.strictEqual(typeof locationConstraint, 'string');
         }
-        if (ingestionConfig) {
-            assert.strictEqual(typeof ingestionConfig, 'object');
-        }
-        if (azureInfo) {
-            assert.strictEqual(typeof azureInfo, 'object');
-        }
-        if (readLocationConstraint) {
-            assert.strictEqual(typeof readLocationConstraint, 'string');
-        }
         if (websiteConfiguration) {
             assert(websiteConfiguration instanceof WebsiteConfiguration);
             const indexDocument = websiteConfiguration.getIndexDocument();
@@ -283,14 +217,8 @@ export default class BucketInfo {
             READ: [],
             READ_ACP: [],
         };
-        if (tags === undefined) {
-            tags = [] as BucketTag[];
-        }
-        assert.strictEqual(areTagsValid(tags), true);
-        if (quotaMax) {
-            assert.strictEqual(typeof quotaMax, 'number');
-            assert(quotaMax >= 0, 'Quota cannot be negative');
+        if (tags) {
+            assert(Array.isArray(tags));
         }
 
         // IF UPDATING PROPERTIES, INCREMENT MODELVERSION NUMBER ABOVE
@@ -305,22 +233,16 @@ export default class BucketInfo {
         this._serverSideEncryption = serverSideEncryption || null;
         this._versioningConfiguration = versioningConfiguration || null;
         this._locationConstraint = locationConstraint || null;
-        this._readLocationConstraint = readLocationConstraint || null;
         this._websiteConfiguration = websiteConfiguration || null;
         this._replicationConfiguration = replicationConfiguration || null;
         this._cors = cors || null;
         this._lifecycleConfiguration = lifecycleConfiguration || null;
         this._bucketPolicy = bucketPolicy || null;
         this._uid = uid || uuid();
-        this._isNFS = isNFS || null;
-        this._ingestion = ingestionConfig || null;
-        this._azureInfo = azureInfo || null;
         this._objectLockEnabled = objectLockEnabled || false;
         this._objectLockConfiguration = objectLockConfiguration || null;
         this._notificationConfiguration = notificationConfiguration || null;
-        this._tags = tags;
-        this._capabilities = capabilities || undefined;
-        this._quotaMax = quotaMax || 0;
+        this._tags = tags || null;
         return this;
     }
 
@@ -341,22 +263,16 @@ export default class BucketInfo {
             serverSideEncryption: this._serverSideEncryption,
             versioningConfiguration: this._versioningConfiguration,
             locationConstraint: this._locationConstraint,
-            readLocationConstraint: this._readLocationConstraint,
             websiteConfiguration: undefined,
             cors: this._cors,
             replicationConfiguration: this._replicationConfiguration,
             lifecycleConfiguration: this._lifecycleConfiguration,
             bucketPolicy: this._bucketPolicy,
             uid: this._uid,
-            isNFS: this._isNFS,
-            ingestion: this._ingestion,
-            azureInfo: this._azureInfo,
             objectLockEnabled: this._objectLockEnabled,
             objectLockConfiguration: this._objectLockConfiguration,
             notificationConfiguration: this._notificationConfiguration,
             tags: this._tags,
-            capabilities: this._capabilities,
-            quotaMax: this._quotaMax,
         };
         const final = this._websiteConfiguration
             ? {
@@ -380,10 +296,8 @@ export default class BucketInfo {
             obj.transient, obj.deleted, obj.serverSideEncryption,
             obj.versioningConfiguration, obj.locationConstraint, websiteConfig,
             obj.cors, obj.replicationConfiguration, obj.lifecycleConfiguration,
-            obj.bucketPolicy, obj.uid, obj.readLocationConstraint, obj.isNFS,
-            obj.ingestion, obj.azureInfo, obj.objectLockEnabled,
-            obj.objectLockConfiguration, obj.notificationConfiguration, obj.tags,
-            obj.capabilities, obj.quotaMax);
+            obj.bucketPolicy, obj.uid, obj.objectLockEnabled,
+            obj.objectLockConfiguration, obj.notificationConfiguration, obj.tags);
     }
 
     /**
@@ -407,11 +321,8 @@ export default class BucketInfo {
             data._versioningConfiguration, data._locationConstraint,
             data._websiteConfiguration, data._cors,
             data._replicationConfiguration, data._lifecycleConfiguration,
-            data._bucketPolicy, data._uid, data._readLocationConstraint,
-            data._isNFS, data._ingestion, data._azureInfo,
-            data._objectLockEnabled, data._objectLockConfiguration,
-            data._notificationConfiguration, data._tags, data._capabilities,
-            data._quotaMax);
+            data._bucketPolicy, data._uid, data._objectLockEnabled,
+            data._objectLockConfiguration, data._notificationConfiguration, data._tags);
     }
 
     /**
@@ -708,17 +619,6 @@ export default class BucketInfo {
         return this._locationConstraint;
     }
 
-    /**
-     * Get read location constraint.
-     * @return - bucket read location constraint
-     */
-    getReadLocationConstraint() {
-        if (this._readLocationConstraint) {
-            return this._readLocationConstraint;
-        }
-        return this._locationConstraint;
-    }
-
     /**
      * Set Bucket model version
     *
@@ -807,85 +707,6 @@ export default class BucketInfo {
         this._uid = uid;
         return this;
     }
-    /**
-     * Check if the bucket is an NFS bucket.
-     * @return - Wether the bucket is NFS or not
-     */
-    isNFS() {
-        return this._isNFS;
-    }
-    /**
-     * Set whether the bucket is an NFS bucket.
-     * @param isNFS - Wether the bucket is NFS or not
-     * @return - bucket info instance
-     */
-    setIsNFS(isNFS: boolean) {
-        this._isNFS = isNFS;
-        return this;
-    }
-    /**
-     * enable ingestion, set 'this._ingestion' to { status: 'enabled' }
-     * @return - bucket info instance
-     */
-    enableIngestion() {
-        this._ingestion = { status: 'enabled' };
-        return this;
-    }
-    /**
-     * disable ingestion, set 'this._ingestion' to { status: 'disabled' }
-     * @return - bucket info instance
-     */
-    disableIngestion() {
-        this._ingestion = { status: 'disabled' };
-        return this;
-    }
-    /**
-     * Get ingestion configuration
-     * @return - bucket ingestion configuration: Enabled or Disabled
-     */
-    getIngestion() {
-        return this._ingestion;
-    }
-
-    /**
-     ** Check if bucket is an ingestion bucket
-     * @return - 'true' if bucket is ingestion bucket, 'false' if
-     * otherwise
-     */
-    isIngestionBucket() {
-        const ingestionConfig = this.getIngestion();
-        if (ingestionConfig) {
-            return true;
-        }
-        return false;
-    }
-    /**
-     * Check if ingestion is enabled
-     * @return - 'true' if ingestion is enabled, otherwise 'false'
-     */
-    isIngestionEnabled() {
-        const ingestionConfig = this.getIngestion();
-        return ingestionConfig ? ingestionConfig.status === 'enabled' : false;
-    }
-
-    /**
-     * Return the Azure specific storage account information for this bucket
-     * @return - a structure suitable for {@link BucketAzureIno}
-     * constructor
-     */
-    getAzureInfo() {
-        return this._azureInfo;
-    }
-    /**
-     * Set the Azure specific storage account information for this bucket
-     * @param azureInfo - a structure suitable for
-     * {@link BucketAzureInfo} construction
-     * @return - bucket info instance
-     */
-    setAzureInfo(azureInfo: any) {
-        this._azureInfo = azureInfo;
-        return this;
-    }
     /**
      * Check if object lock is enabled.
      * @return - depending on whether object lock is enabled
@@ -905,7 +726,7 @@ export default class BucketInfo {
 
     /**
      * Get the value of bucket tags
-     * @return - Array of bucket tags
+     * @return - Array of bucket tags as {"key" : "key", "value": "value"}
     */
     getTags() {
         return this._tags;
@@ -913,58 +734,13 @@ export default class BucketInfo {
 
     /**
      * Set bucket tags
+     * @param tags - collection of tags
+     * @param tags[].key - key of the tag
+     * @param tags[].value - value of the tag
      * @return - bucket info instance
      */
-    setTags(tags: Array<BucketTag>) {
+    setTags(tags: { key: string; value: string }[]) {
         this._tags = tags;
         return this;
     }
-
-    /**
-     * Get the value of bucket capabilities
-     * @return - capabilities of the bucket
-     */
-    getCapabilities() {
-        return this._capabilities;
-    }
-
-    /**
-     * Get a specific bucket capability
-     *
-     * @param capability? - if provided, will return a specific capacity
-     * @return - capability of the bucket
-     */
-    getCapability(capability: string) : VeeamSOSApi | undefined {
-        if (capability && this._capabilities && this._capabilities[capability]) {
-            return this._capabilities[capability];
-        }
-        return undefined;
-    }
-
-    /**
-     * Set bucket capabilities
-     * @return - bucket info instance
-     */
-    setCapabilities(capabilities: Capabilities) {
-        this._capabilities = capabilities;
-        return this;
-    }
-
-    /**
-     * Get the bucket quota information
-     * @return quotaMax
-     */
-    getQuota() {
-        return this._quotaMax;
-    }
-
-    /**
-     * Set bucket quota
-     * @param quota - quota to be set
-     * @return - bucket quota info
-     */
-    setQuota(quota: number) {
-        this._quotaMax = quota || 0;
-        return this;
-    }
 }

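With `areTagsValid` gone, the constructor above only checks the tag container's shape, not its contents. A hedged sketch of what that weaker check accepts (the helper name here is hypothetical):

```ts
type BucketTag = { key: string; value: string };

// Mirrors the constructor assertion in the hunk above: an Array.isArray
// check on the container, with no per-tag key/value validation.
function normalizeTags(tags?: BucketTag[]): BucketTag[] | null {
    if (tags && !Array.isArray(tags)) {
        throw new TypeError('tags must be an array');
    }
    return tags || null;
}

console.log(normalizeTags([{ key: 'env', value: 'prod' }])); // kept as-is
console.log(normalizeTags(undefined)); // null, matching `tags || null`
```
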
@ -7,8 +7,6 @@ import escapeForXml from '../s3middleware/escapeForXml';
|
||||||
import type { XMLRule } from './ReplicationConfiguration';
|
import type { XMLRule } from './ReplicationConfiguration';
|
||||||
import { Status } from './LifecycleRule';
|
import { Status } from './LifecycleRule';
|
||||||
|
|
||||||
const MAX_DAYS = 2147483647; // Max 32-bit signed binary integer.
|
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* Format of xml request:
|
* Format of xml request:
|
||||||
|
|
||||||
|
@ -89,7 +87,6 @@ export default class LifecycleConfiguration {
|
||||||
_parsedXML: any;
|
_parsedXML: any;
|
||||||
_ruleIDs: string[];
|
_ruleIDs: string[];
|
||||||
_tagKeys: string[];
|
_tagKeys: string[];
|
||||||
_storageClasses: string[];
|
|
||||||
_config: {
|
_config: {
|
||||||
error?: ArsenalError;
|
error?: ArsenalError;
|
||||||
rules?: any[];
|
rules?: any[];
|
||||||
|
@ -98,13 +95,10 @@ export default class LifecycleConfiguration {
|
||||||
/**
|
/**
|
||||||
* Create a Lifecycle Configuration instance
|
* Create a Lifecycle Configuration instance
|
||||||
* @param xml - the parsed xml
|
* @param xml - the parsed xml
|
||||||
* @param config - the CloudServer config
|
|
||||||
* @return - LifecycleConfiguration instance
|
* @return - LifecycleConfiguration instance
|
||||||
*/
|
*/
|
||||||
constructor(xml: any, config: { replicationEndpoints: { site: string }[] }) {
|
constructor(xml: any) {
|
||||||
this._parsedXML = xml;
|
this._parsedXML = xml;
|
||||||
this._storageClasses =
|
|
||||||
config.replicationEndpoints.map(endpoint => endpoint.site);
|
|
||||||
this._ruleIDs = [];
|
this._ruleIDs = [];
|
||||||
this._tagKeys = [];
|
this._tagKeys = [];
|
||||||
this._config = {};
|
this._config = {};
|
||||||
|
@ -225,6 +219,11 @@ export default class LifecycleConfiguration {
|
||||||
* }
|
* }
|
||||||
*/
|
*/
|
||||||
_parseRule(rule: XMLRule) {
|
_parseRule(rule: XMLRule) {
|
||||||
|
if (rule.Transition || rule.NoncurrentVersionTransition) {
|
||||||
|
const msg = 'Transition lifecycle action not yet implemented';
|
||||||
|
const error = errors.NotImplemented.customizeDescription(msg);
|
||||||
|
return { error };
|
||||||
|
}
|
||||||
// Either Prefix or Filter must be included, but can be empty string
|
// Either Prefix or Filter must be included, but can be empty string
|
||||||
if ((!rule.Filter && rule.Filter !== '') &&
|
if ((!rule.Filter && rule.Filter !== '') &&
|
||||||
(!rule.Prefix && rule.Prefix !== '')) {
|
(!rule.Prefix && rule.Prefix !== '')) {
|
||||||
|
@@ -493,366 +492,6 @@ export default class LifecycleConfiguration {
         return { ...base, ruleStatus: status }
     }

-    /**
-     * Finds the prefix and/or tags of the given rule and gets the error message
-     * @param rule - The rule to find the prefix in
-     * @return - The prefix of filter information
-     */
-    _getRuleFilterDesc(rule: { Prefix?: string[]; Filter?: any[] }) {
-        if (rule.Prefix) {
-            return `prefix '${rule.Prefix[0]}'`;
-        }
-        // There must be a filter if no top-level prefix is provided. First
-        // check if there are multiple filters (i.e. `Filter.And`).
-        if (rule.Filter?.[0] === undefined || rule.Filter[0].And === undefined) {
-            const { Prefix, Tag } = rule.Filter?.[0] || {};
-            if (Prefix) {
-                return `filter '(prefix=${Prefix[0]})'`;
-            }
-            if (Tag) {
-                const { Key, Value } = Tag[0];
-                return `filter '(tag: key=${Key[0]}, value=${Value[0]})'`;
-            }
-            return 'filter (all)';
-        }
-        const filters: string[] = [];
-        const { Prefix, Tag } = rule.Filter[0].And[0];
-        if (Prefix) {
-            filters.push(`prefix=${Prefix[0]}`);
-        }
-        Tag.forEach((tag: { Key: string[]; Value: string[] }) => {
-            const { Key, Value } = tag;
-            filters.push(`tag: key=${Key[0]}, value=${Value[0]}`);
-        });
-        const joinedFilters = filters.join(' and ');
-        return `filter '(${joinedFilters})'`;
-    }
-
-    /**
-     * Checks the validity of the given field
-     * @param params - Given function parameters
-     * @param params.days - The value of the field to check
-     * @param params.field - The field name with the value
-     * @param params.ancestor - The immediate ancestor field
-     * @return Returns an error object or `null`
-     */
-    _checkDays(params: { days: number; field: string; ancestor: string }) {
-        const { days, field, ancestor } = params;
-        if (days < 0) {
-            const msg = `'${field}' in ${ancestor} action must be nonnegative`;
-            return errors.InvalidArgument.customizeDescription(msg);
-        }
-        if (days > MAX_DAYS) {
-            return errors.MalformedXML.customizeDescription(
-                `'${field}' in ${ancestor} action must not exceed ${MAX_DAYS}`);
-        }
-        return null;
-    }
-
-    /**
-     * Checks the validity of the given storage class
-     * @param params - Given function parameters
-     * @param params.usedStorageClasses - Storage classes used in other
-     * rules
-     * @param params.storageClass - The storage class of the current
-     * rule
-     * @param params.ancestor - The immediate ancestor field
-     * @param params.prefix - The prefix of the rule
-     * @return Returns an error object or `null`
-     */
-    _checkStorageClasses(params: {
-        usedStorageClasses: string[];
-        storageClass: string;
-        ancestor: string;
-        rule: { Prefix?: string[]; Filter?: any };
-    }) {
-        const { usedStorageClasses, storageClass, ancestor, rule } = params;
-        if (!this._storageClasses.includes(storageClass)) {
-            // This differs from the AWS message. This will help the user since
-            // the StorageClass does not conform to AWS specs.
-            const list = `'${this._storageClasses.join("', '")}'`;
-            const msg = `'StorageClass' must be one of ${list}`;
-            return errors.MalformedXML.customizeDescription(msg);
-        }
-        if (usedStorageClasses.includes(storageClass)) {
-            const msg = `'StorageClass' must be different for '${ancestor}' ` +
-                `actions in same 'Rule' with ${this._getRuleFilterDesc(rule)}`;
-            return errors.InvalidRequest.customizeDescription(msg);
-        }
-        return null;
-    }
-
-    /**
-     * Ensure that transition rules are at least a day apart from each other.
-     * @param params - Given function parameters
-     * @param [params.days] - The days of the current transition
-     * @param [params.date] - The date of the current transition
-     * @param params.storageClass - The storage class of the current
-     * rule
-     * @param params.rule - The current rule
-     */
-    _checkTimeGap(params: {
-        days?: number;
-        date?: string;
-        storageClass: string;
-        rule: { Transition: any[]; Prefix?: string[]; Filter?: any };
-    }) {
-        const { days, date, storageClass, rule } = params;
-        const invalidTransition = rule.Transition.find(transition => {
-            if (storageClass === transition.StorageClass[0]) {
-                return false;
-            }
-            if (days !== undefined) {
-                return Number.parseInt(transition.Days[0], 10) === days;
-            }
-            if (date !== undefined) {
-                const timestamp = new Date(date).getTime();
-                const compareTimestamp = new Date(transition.Date[0]).getTime();
-                const oneDay = 24 * 60 * 60 * 1000; // Milliseconds in a day.
-                return Math.abs(timestamp - compareTimestamp) < oneDay;
-            }
-            return false;
-        });
-        if (invalidTransition) {
-            const timeType = days !== undefined ? 'Days' : 'Date';
-            const filterMsg = this._getRuleFilterDesc(rule);
-            const compareStorageClass = invalidTransition.StorageClass[0];
-            const msg = `'${timeType}' in the 'Transition' action for ` +
-                `StorageClass '${storageClass}' for ${filterMsg} must be at ` +
-                `least one day apart from ${filterMsg} in the 'Transition' ` +
-                `action for StorageClass '${compareStorageClass}'`;
-            return errors.InvalidArgument.customizeDescription(msg);
-        }
-        return null;
-    }
-
-    /**
-     * Checks transition time type (i.e. 'Date' or 'Days') only occurs once
-     * across transitions and across transitions and expiration policies
-     * @param params - Given function parameters
-     * @param params.usedTimeType - The time type that has been used by
-     * another rule
-     * @param params.currentTimeType - the time type used by the
-     * current rule
-     * @param params.rule - The current rule
-     * @return Returns an error object or `null`
-     */
-    _checkTimeType(params: {
-        usedTimeType: string | null;
-        currentTimeType: string;
-        rule: { Prefix?: string[]; Filter?: any; Expiration?: any[] };
-    }) {
-        const { usedTimeType, currentTimeType, rule } = params;
-        if (usedTimeType && usedTimeType !== currentTimeType) {
-            const msg = "Found mixed 'Date' and 'Days' based Transition " +
-                'actions in lifecycle rule for ' +
-                `${this._getRuleFilterDesc(rule)}`;
-            return errors.InvalidRequest.customizeDescription(msg);
-        }
-        // Transition time type cannot differ from the expiration, if provided.
-        if (rule.Expiration &&
-            rule.Expiration[0][currentTimeType] === undefined) {
-            const msg = "Found mixed 'Date' and 'Days' based Expiration and " +
-                'Transition actions in lifecycle rule for ' +
-                `${this._getRuleFilterDesc(rule)}`;
-            return errors.InvalidRequest.customizeDescription(msg);
-        }
-        return null;
-    }
-
-    /**
-     * Checks the validity of the given date
-     * @param date - The date the check
-     * @return Returns an error object or `null`
-     */
-    _checkDate(date: string) {
-        const isoRegex = new RegExp(
-            "^(-?(?:[1-9][0-9]*)?[0-9]{4})" + // Year
-            "-(1[0-2]|0[1-9])" + // Month
-            "-(3[01]|0[1-9]|[12][0-9])" + // Day
-            "T(2[0-3]|[01][0-9])" + // Hour
-            ":([0-5][0-9])" + // Minute
-            ":([0-5][0-9])" + // Second
-            "(\\.[0-9]+)?" + // Fractional second
-            "(Z|[+-][01][0-9]:[0-5][0-9])?$", // Timezone
-            "g"
-        );
-        const matches = [...date.matchAll(isoRegex)];
-        if (matches.length !== 1) {
-            const msg = 'Date must be in ISO 8601 format';
-            return errors.InvalidArgument.customizeDescription(msg);
-        }
-        // Check for a timezone in the last match group. If none, add a Z to indicate UTC.
-        if (!matches[0][matches[0].length-1]) {
-            date += 'Z';
-        }
-        const dateObj = new Date(date);
-        if (Number.isNaN(dateObj.getTime())) {
-            const msg = 'Date is not a valid date';
-            return errors.InvalidArgument.customizeDescription(msg);
-        }
-        if (dateObj.getUTCHours() !== 0
-            || dateObj.getUTCMinutes() !== 0
-            || dateObj.getUTCSeconds() !== 0
-            || dateObj.getUTCMilliseconds() !== 0) {
-            const msg = '\'Date\' must be at midnight GMT';
-            return errors.InvalidArgument.customizeDescription(msg);
-        }
-        return null;
-    }
-
-    /**
-     * Parses the NonCurrentVersionTransition value
-     * @param rule - Rule object from Rule array from this._parsedXml
-     * @return - Contains error if parsing failed, otherwise contains
-     * the parsed nonCurrentVersionTransition array
-     *
-     * Format of result:
-     * result = {
-     *     error: <error>,
-     *     nonCurrentVersionTransition: [
-     *         {
-     *             noncurrentDays: <non-current-days>,
-     *             storageClass: <storage-class>,
-     *         },
-     *         ...
-     *     ]
-     * }
-     */
-    _parseNoncurrentVersionTransition(rule: {
-        NoncurrentVersionTransition: any[];
-        Prefix?: string[];
-        Filter?: any;
-    }) {
-        const nonCurrentVersionTransition: {
-            noncurrentDays: number;
-            storageClass: string;
-        }[] = [];
-        const usedStorageClasses: string[] = [];
-        for (let i = 0; i < rule.NoncurrentVersionTransition.length; i++) {
-            const t = rule.NoncurrentVersionTransition[i]; // Transition object
-            const noncurrentDays: number | undefined =
-                t.NoncurrentDays && Number.parseInt(t.NoncurrentDays[0], 10);
-            const storageClass: string | undefined = t.StorageClass && t.StorageClass[0];
-            if (noncurrentDays === undefined || storageClass === undefined) {
-                return { error: errors.MalformedXML };
-            }
-            let error = this._checkDays({
-                days: noncurrentDays,
-                field: 'NoncurrentDays',
-                ancestor: 'NoncurrentVersionTransition',
-            });
-            if (error) {
-                return { error };
-            }
-            error = this._checkStorageClasses({
-                storageClass,
-                usedStorageClasses,
-                ancestor: 'NoncurrentVersionTransition',
-                rule,
-            });
-            if (error) {
-                return { error };
-            }
-            nonCurrentVersionTransition.push({ noncurrentDays, storageClass });
-            usedStorageClasses.push(storageClass);
-        }
-        return { nonCurrentVersionTransition };
-    }
-
-    /**
-     * Parses the Transition value
-     * @param rule - Rule object from Rule array from this._parsedXml
-     * @return - Contains error if parsing failed, otherwise contains
-     * the parsed transition array
-     *
-     * Format of result:
-     * result = {
-     *     error: <error>,
-     *     transition: [
-     *         {
-     *             days: <days>,
-     *             date: <date>,
-     *             storageClass: <storage-class>,
-     *         },
-     *         ...
-     *     ]
-     * }
-     */
-    _parseTransition(rule: {
-        Transition: any[];
-        Prefix?: string[];
-        Filter?: any;
-    }) {
-        const transition:
-            ({ days: number; storageClass: string }
-            | { date: string; storageClass: string })[] = [];
-        const usedStorageClasses: string[] = [];
-        let usedTimeType: string | null = null;
-        for (let i = 0; i < rule.Transition.length; i++) {
-            const t = rule.Transition[i]; // Transition object
-            const days = t.Days && Number.parseInt(t.Days[0], 10);
-            const date = t.Date && t.Date[0];
-            const storageClass = t.StorageClass && t.StorageClass[0];
-            if ((days === undefined && date === undefined) ||
-                (days !== undefined && date !== undefined) ||
-                (storageClass === undefined)) {
-                return { error: errors.MalformedXML };
-            }
-            let error = this._checkStorageClasses({
-                storageClass,
-                usedStorageClasses,
-                ancestor: 'Transition',
-                rule,
-            });
-            if (error) {
-                return { error };
-            }
-            usedStorageClasses.push(storageClass);
-            if (days !== undefined) {
-                error = this._checkTimeType({
-                    usedTimeType,
-                    currentTimeType: 'Days',
-                    rule,
-                });
-                if (error) {
-                    return { error };
-                }
-                usedTimeType = 'Days';
-                error = this._checkDays({
-                    days,
-                    field: 'Days',
-                    ancestor: 'Transition',
-                });
-                if (error) {
-                    return { error };
-                }
-                transition.push({ days, storageClass });
-            }
-            if (date !== undefined) {
-                error = this._checkTimeType({
-                    usedTimeType,
-                    currentTimeType: 'Date',
-                    rule,
-                });
-                if (error) {
-                    return { error };
-                }
-                usedTimeType = 'Date';
-                error = this._checkDate(date);
-                if (error) {
-                    return { error };
-                }
-                transition.push({ date, storageClass });
-            }
-            error = this._checkTimeGap({ days, date, storageClass, rule });
-            if (error) {
-                return { error };
-            }
-        }
-        return { transition };
-    }
-
     /**
      * Check that action component of rule is valid
      * @param rule - a rule object from Rule array from this._parsedXml
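The removed `_checkTimeGap` helper deserves a gloss, since it encodes the one-day-apart rule its error message describes: two transitions to different storage classes conflict when their trigger times land within 24 hours of each other. A standalone sketch of that comparison (the function name and values are illustrative, not part of the remaining code):

    // Hypothetical standalone version of the removed day-gap check.
    function transitionsTooClose(dateA: string, dateB: string): boolean {
        const oneDay = 24 * 60 * 60 * 1000; // Milliseconds in a day.
        const diff = Math.abs(
            new Date(dateA).getTime() - new Date(dateB).getTime());
        return diff < oneDay; // within a day -> the rule pair is rejected
    }

    // transitionsTooClose('2019-01-01T00:00:00Z', '2019-01-01T12:00:00Z') => true
    // transitionsTooClose('2019-01-01T00:00:00Z', '2019-01-02T00:00:00Z') => false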
@@ -889,13 +528,8 @@ export default class LifecycleConfiguration {
             propName: 'actions',
             actions: [],
         };
-        const validActions = [
-            'AbortIncompleteMultipartUpload',
-            'Expiration',
-            'NoncurrentVersionExpiration',
-            'NoncurrentVersionTransition',
-            'Transition',
-        ];
+        const validActions = ['AbortIncompleteMultipartUpload',
+            'Expiration', 'NoncurrentVersionExpiration'];
         validActions.forEach(a => {
             if (rule[a]) {
                 actionsObj.actions.push({ actionName: `${a}` });
@@ -912,14 +546,7 @@ export default class LifecycleConfiguration {
                 if (action.error) {
                     actionsObj.error = action.error;
                 } else {
-                    const actionTimes = [
-                        'days',
-                        'date',
-                        'deleteMarker',
-                        'transition',
-                        'nonCurrentVersionTransition',
-                        'newerNoncurrentVersions'
-                    ];
+                    const actionTimes = ['days', 'date', 'deleteMarker', 'newerNoncurrentVersions'];
                     actionTimes.forEach(t => {
                         if (action[t]) {
                             // eslint-disable-next-line no-param-reassign
@@ -1013,9 +640,12 @@ export default class LifecycleConfiguration {
                 return { error };
             }
             if (subExp.Date) {
-                const error = this._checkDate(subExp.Date[0]);
-                if (error) {
-                    expObj.error = error;
+                const isoRegex = new RegExp('^(-?(?:[1-9][0-9]*)?[0-9]{4})-' +
+                    '(1[0-2]|0[1-9])-(3[01]|0[1-9]|[12][0-9])T(2[0-3]|[01][0-9])' +
+                    ':([0-5][0-9]):([0-5][0-9])(.[0-9]+)?(Z)?$');
+                if (!isoRegex.test(subExp.Date[0])) {
+                    expObj.error = errors.InvalidArgument.customizeDescription(
+                        'Date must be in ISO 8601 format');
                 } else {
                     expObj.date = subExp.Date[0];
                 }
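Note that the inline regex is a weaker check than the deleted `_checkDate`: it no longer confirms the string parses to a real date, nor that the date falls at midnight GMT. A quick sketch of the difference (regex copied from the hunk above; test values are illustrative):

    const isoRegex = new RegExp('^(-?(?:[1-9][0-9]*)?[0-9]{4})-' +
        '(1[0-2]|0[1-9])-(3[01]|0[1-9]|[12][0-9])T(2[0-3]|[01][0-9])' +
        ':([0-5][0-9]):([0-5][0-9])(.[0-9]+)?(Z)?$');
    isoRegex.test('2019-01-01T12:34:56Z'); // true, though not midnight GMT
    isoRegex.test('01/01/2019');           // false, malformed as before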
@@ -1153,26 +783,6 @@ export default class LifecycleConfiguration {
             if (a.deleteMarker) {
                 assert.strictEqual(typeof a.deleteMarker, 'string');
             }
-            if (a.nonCurrentVersionTransition) {
-                assert.strictEqual(
-                    typeof a.nonCurrentVersionTransition, 'object');
-                a.nonCurrentVersionTransition.forEach(t => {
-                    assert.strictEqual(typeof t.noncurrentDays, 'number');
-                    assert.strictEqual(typeof t.storageClass, 'string');
-                });
-            }
-            if (a.transition) {
-                assert.strictEqual(typeof a.transition, 'object');
-                a.transition.forEach(t => {
-                    if (t.days || t.days === 0) {
-                        assert.strictEqual(typeof t.days, 'number');
-                    }
-                    if (t.date !== undefined) {
-                        assert.strictEqual(typeof t.date, 'string');
-                    }
-                    assert.strictEqual(typeof t.storageClass, 'string');
-                });
-            }
-
             if (a.newerNoncurrentVersions) {
                 assert.strictEqual(typeof a.newerNoncurrentVersions, 'number');
@@ -1226,15 +836,7 @@ export default class LifecycleConfiguration {
         }

         const Actions = actions.map(action => {
-            const {
-                actionName,
-                days,
-                date,
-                deleteMarker,
-                nonCurrentVersionTransition,
-                transition,
-                newerNoncurrentVersions,
-            } = action;
+            const { actionName, days, date, deleteMarker, newerNoncurrentVersions } = action;
             let Action: any;
             if (actionName === 'AbortIncompleteMultipartUpload') {
                 Action = `<${actionName}><DaysAfterInitiation>${days}` +
@@ -1253,40 +855,6 @@ export default class LifecycleConfiguration {
                 Action = `<${actionName}>${Days}${Date}${DelMarker}` +
                     `</${actionName}>`;
             }
-            if (actionName === 'NoncurrentVersionTransition') {
-                const xml: string[] = [];
-                nonCurrentVersionTransition!.forEach(transition => {
-                    const { noncurrentDays, storageClass } = transition;
-                    xml.push(
-                        `<${actionName}>`,
-                        `<NoncurrentDays>${noncurrentDays}` +
-                            '</NoncurrentDays>',
-                        `<StorageClass>${storageClass}</StorageClass>`,
-                        `</${actionName}>`,
-                    );
-                });
-                Action = xml.join('');
-            }
-            if (actionName === 'Transition') {
-                const xml: string[] = [];
-                transition!.forEach(transition => {
-                    const { days, date, storageClass } = transition;
-                    let element: string = '';
-                    if (days !== undefined) {
-                        element = `<Days>${days}</Days>`;
-                    }
-                    if (date !== undefined) {
-                        element = `<Date>${date}</Date>`;
-                    }
-                    xml.push(
-                        `<${actionName}>`,
-                        element,
-                        `<StorageClass>${storageClass}</StorageClass>`,
-                        `</${actionName}>`,
-                    );
-                });
-                Action = xml.join('');
-            }
             return Action;
         }).join('');
         return `<Rule>${ID}${Status}${Filter}${Actions}</Rule>`;
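For reference, the removed `Transition` branch serialized each parsed action back into XML of the following shape; a sketch with example values (this is not produced by the code that remains on this branch):

    // Shape the removed builder emitted for a days-based transition:
    const actionName = 'Transition';
    const days = 30;                    // illustrative value
    const storageClass = 'STANDARD_IA'; // illustrative value
    const xml = `<${actionName}><Days>${days}</Days>` +
        `<StorageClass>${storageClass}</StorageClass></${actionName}>`;
    // => '<Transition><Days>30</Days><StorageClass>STANDARD_IA</StorageClass></Transition>'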
@@ -1369,15 +937,6 @@ export type Rule = {
         date?: number;
         deleteMarker?: boolean;
         newerNoncurrentVersions?: number;
-        nonCurrentVersionTransition?: {
-            noncurrentDays: number;
-            storageClass: string;
-        }[];
-        transition?: {
-            days?: number;
-            date?: string;
-            storageClass: string;
-        }[];
     }[];
     filter?: {
         rulePrefix?: string;

@@ -28,7 +28,6 @@ export default class LifecycleRule {
     ncvExpiration?: NoncurrentExpiration;
     abortMPU?: { DaysAfterInitiation: number };
     transitions?: any[];
-    ncvTransitions?: any[];
     prefix?: string;

     constructor(id: string, status: Status) {
@@ -46,7 +45,6 @@ export default class LifecycleRule {
         NoncurrentVersionExpiration?: NoncurrentExpiration;
         AbortIncompleteMultipartUpload?: { DaysAfterInitiation: number };
         Transitions?: any[];
-        NoncurrentVersionTransitions?: any[];
         Filter?: Filter;
         Prefix?: '';
     } = { ID: this.id, Status: this.status };
@@ -63,9 +61,6 @@ export default class LifecycleRule {
         if (this.transitions) {
             rule.Transitions = this.transitions;
         }
-        if (this.ncvTransitions) {
-            rule.NoncurrentVersionTransitions = this.ncvTransitions;
-        }

         const filter = this.buildFilter();

@@ -178,13 +173,4 @@ export default class LifecycleRule {
         this.transitions = transitions;
         return this;
     }
-
-    /**
-     * NonCurrentVersionTransitions
-     * @param nvcTransitions - NonCurrentVersionTransitions
-     */
-    addNCVTransitions(nvcTransitions) {
-        this.ncvTransitions = nvcTransitions;
-        return this;
-    }
 }

@@ -1,13 +1,9 @@
-import * as crypto from 'crypto';
 import * as constants from '../constants';
 import * as VersionIDUtils from '../versioning/VersionID';
-import { VersioningConstants } from '../versioning/constants';
 import ObjectMDLocation, {
     ObjectMDLocationData,
     Location,
 } from './ObjectMDLocation';
-import ObjectMDAmzRestore from './ObjectMDAmzRestore';
-import ObjectMDArchive from './ObjectMDArchive';

 export type ACL = {
     Canned: string;
@@ -32,7 +28,6 @@ export type ReplicationInfo = {
     role: string;
     storageType: string;
     dataStoreVersionId: string;
-    isNFS: boolean | null;
 };

 export type ObjectMDData = {
@@ -40,26 +35,24 @@ export type ObjectMDData = {
     'owner-id': string;
     'cache-control': string;
     'content-disposition': string;
-    'content-language': string;
     'content-encoding': string;
-    'creation-time'?: string;
     'last-modified'?: string;
     expires: string;
     'content-length': number;
     'content-type': string;
     'content-md5': string;
+    // simple/no version. will expand once object versioning is
+    // introduced
     'x-amz-version-id': 'null' | string;
     'x-amz-server-version-id': string;
-    'x-amz-restore'?: ObjectMDAmzRestore;
-    archive?: ObjectMDArchive;
+    // TODO: Handle this as a utility function for all object puts
+    // similar to normalizing request but after checkAuth so
+    // string to sign is not impacted. This is GH Issue#89.
     'x-amz-storage-class': string;
     'x-amz-server-side-encryption': string;
     'x-amz-server-side-encryption-aws-kms-key-id': string;
     'x-amz-server-side-encryption-customer-algorithm': string;
     'x-amz-website-redirect-location': string;
-    'x-amz-scal-transition-in-progress'?: boolean;
-    'x-amz-scal-transition-time'?: string;
-    azureInfo?: any;
     acl: ACL;
     key: string;
     location: null | Location[];
@@ -79,17 +72,6 @@ export type ObjectMDData = {
     replicationInfo: ReplicationInfo;
     dataStoreName: string;
     originOp: string;
-    microVersionId?: string;
-    // Deletion flag
-    // Used for keeping object metadata in the oplog event
-    // In case of a deletion the flag is first updated before
-    // deleting the object
-    deleted: boolean;
-    // PHD flag indicates whether the object is a temporary placeholder.
-    // This is the case when the latest version of an object gets deleted
-    // the master is set as a placeholder and gets updated with the new latest
-    // version data after a certain amount of time.
-    isPHD: boolean;
 };

 /**
@@ -118,17 +100,9 @@ export default class ObjectMD {
             } else {
                 this._updateFromParsedJSON(objMd);
             }
-            if (!this._data['creation-time']) {
-                const lastModified = this.getLastModified();
-                if (lastModified) {
-                    this.setCreationTime(lastModified);
-                }
-            }
         } else {
             // set newly-created object md modified time to current time
-            const dt = new Date().toJSON();
-            this.setLastModified(dt);
-            this.setCreationTime(dt);
+            this._data['last-modified'] = new Date().toJSON();
         }
         // set latest md model version now that we ensured
         // backward-compat conversion
@@ -183,8 +157,6 @@ export default class ObjectMD {
             'content-length': 0,
             'content-type': '',
             'content-md5': '',
-            'content-language': '',
-            'creation-time': undefined,
             // simple/no version. will expand once object versioning is
             // introduced
             'x-amz-version-id': 'null',
@@ -197,7 +169,6 @@ export default class ObjectMD {
             'x-amz-server-side-encryption-aws-kms-key-id': '',
             'x-amz-server-side-encryption-customer-algorithm': '',
             'x-amz-website-redirect-location': '',
-            'x-amz-scal-transition-in-progress': false,
             acl: {
                 Canned: 'private',
                 FULL_CONTROL: [],
@@ -207,7 +178,6 @@ export default class ObjectMD {
             },
             key: '',
             location: null,
-            azureInfo: undefined,
             // versionId, isNull, nullVersionId and isDeleteMarker
             // should be undefined when not set explicitly
             isNull: undefined,
@@ -227,12 +197,9 @@ export default class ObjectMD {
                 role: '',
                 storageType: '',
                 dataStoreVersionId: '',
-                isNFS: null,
             },
             dataStoreName: '',
             originOp: '',
-            deleted: false,
-            isPHD: false,
         };
     }

@@ -462,50 +429,6 @@ export default class ObjectMD {
         return this._data['content-md5'];
     }

-    /**
-     * Set content-language
-     *
-     * @param contentLanguage - content-language
-     * @return itself
-     */
-    setContentLanguage(contentLanguage: string) {
-        this._data['content-language'] = contentLanguage;
-        return this;
-    }
-
-    /**
-     * Returns content-language
-     *
-     * @return content-language
-     */
-    getContentLanguage() {
-        return this._data['content-language'];
-    }
-
-    /**
-     * Set Creation Date
-     *
-     * @param creationTime - Creation Date
-     * @return itself
-     */
-    setCreationTime(creationTime: string) {
-        this._data['creation-time'] = creationTime;
-        return this;
-    }
-
-    /**
-     * Returns Creation Date
-     *
-     * @return Creation Date
-     */
-    getCreationTime() {
-        // If creation-time is not set fallback to LastModified
-        if (!this._data['creation-time']) {
-            return this.getLastModified();
-        }
-        return this._data['creation-time'];
-    }
-
     /**
      * Set version id
      *
@@ -646,48 +569,6 @@ export default class ObjectMD {
         return this._data['x-amz-website-redirect-location'];
     }

-    /**
-     * Set metadata transition in progress value
-     *
-     * @param inProgress - True if transition is in progress, false otherwise
-     * @param transitionTime - Date when the transition started
-     * @return itself
-     */
-    setTransitionInProgress(inProgress: false): this
-    setTransitionInProgress(inProgress: true, transitionTime: Date|string|number): this
-    setTransitionInProgress(inProgress: boolean, transitionTime?: Date|string|number) {
-        this._data['x-amz-scal-transition-in-progress'] = inProgress;
-        if (!inProgress || !transitionTime) {
-            delete this._data['x-amz-scal-transition-time'];
-        } else {
-            if (typeof transitionTime === 'number') {
-                transitionTime = new Date(transitionTime);
-            }
-            if (transitionTime instanceof Date) {
-                transitionTime = transitionTime.toISOString();
-            }
-            this._data['x-amz-scal-transition-time'] = transitionTime;
-        }
-        return this;
-    }
-
-    /**
-     * Get metadata transition in progress value
-     *
-     * @return True if transition is in progress, false otherwise
-     */
-    getTransitionInProgress() {
-        return this._data['x-amz-scal-transition-in-progress'];
-    }
-
-    /**
-     * Gets the transition time of the object.
-     * @returns The transition time of the object.
-     */
-    getTransitionTime() {
-        return this._data['x-amz-scal-transition-time'];
-    }
-
     /**
      * Set access control list
      *
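The removed setter also normalized its `transitionTime` argument from epoch milliseconds or a Date down to an ISO string before storing it. A standalone sketch of that normalization (the helper name is illustrative):

    // Hypothetical standalone version of the removed normalization step.
    function toIsoString(transitionTime: Date | string | number): string {
        if (typeof transitionTime === 'number') {
            transitionTime = new Date(transitionTime); // epoch millis -> Date
        }
        if (transitionTime instanceof Date) {
            transitionTime = transitionTime.toISOString(); // Date -> ISO string
        }
        return transitionTime; // strings pass through unchanged
    }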
@@ -793,29 +674,6 @@ export default class ObjectMD {
         return reducedLocations;
     }

-    /**
-     * Set the Azure specific information
-     * @param azureInfo - a plain JS structure representing the
-     * Azure specific information for a Blob or a Container (see constructor
-     * of {@link ObjectMDAzureInfo} for a description of the fields of this
-     * structure
-     * @return itself
-     */
-    setAzureInfo(azureInfo: any) {
-        this._data.azureInfo = azureInfo;
-        return this;
-    }
-
-    /**
-     * Get the Azure specific information
-     * @return a plain JS structure representing the Azure specific
-     * information for a Blob or a Container an suitable for the constructor
-     * of {@link ObjectMDAzureInfo}.
-     */
-    getAzureInfo() {
-        return this._data.azureInfo;
-    }
-
     /**
      * Set metadata isNull value
     *
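The removed `isMultipartUpload` leaned on the S3 ETag convention its comment describes: MPU ETags end in '-[nbparts]', so a dash in content-md5 marks a multipart upload. The whole check reduces to one line (the example value is illustrative):

    // e.g. md5-of-part-md5s plus '-' and the part count:
    const contentMd5 = '9b2cf535f27731c974343645a3985328-5'; // illustrative
    const isMPU = contentMd5.includes('-'); // => true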
@@ -922,19 +780,6 @@ export default class ObjectMD {
         return this._data.isDeleteMarker || false;
     }

-    /**
-     * Get if the object is a multipart upload (MPU)
-     *
-     * The function checks the "content-md5" field: if it contains a
-     * dash ('-') it is a MPU, as the content-md5 string ends with
-     * "-[nbparts]" for MPUs.
-     *
-     * @return Whether object is a multipart upload
-     */
-    isMultipartUpload() {
-        return this.getContentMd5().includes('-');
-    }
-
     /**
      * Set metadata versionId value
     *
@@ -952,9 +797,6 @@ export default class ObjectMD {
      * @return The object versionId
      */
     getVersionId() {
-        if (this.getIsNull()) {
-            return VersioningConstants.ExternalNullVersionId;
-        }
         return this._data.versionId;
     }

@@ -962,16 +804,13 @@ export default class ObjectMD {
      * Get metadata versionId value in encoded form (the one visible
      * to the S3 API user)
      *
-     * @return {undefined|string} The encoded object versionId
+     * @return The encoded object versionId
      */
     getEncodedVersionId() {
         const versionId = this.getVersionId();
-        if (versionId === VersioningConstants.ExternalNullVersionId) {
-            return versionId;
-        } else if (versionId) {
+        if (versionId) {
             return VersionIDUtils.encode(versionId);
         }
-        return undefined;
     }

     /**
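Taken together, this hunk and the previous one change how null versions surface: `getVersionId` no longer maps an is-null object to `VersioningConstants.ExternalNullVersionId`, and `getEncodedVersionId` now encodes every truthy id rather than passing the external null id through verbatim. A sketch of the new branch logic in isolation (the wrapper function is illustrative):

    // After this change every truthy versionId goes through encode();
    // a missing versionId falls through to undefined.
    function encodedId(versionId: string | undefined,
        encode: (v: string) => string): string | undefined {
        if (versionId) {
            return encode(versionId);
        }
        return undefined;
    }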
@@ -1014,20 +853,6 @@ export default class ObjectMD {
         return this._data.tags;
     }

-    getUserMetadata() {
-        const metaHeaders = {};
-        const data = this.getValue();
-        Object.keys(data).forEach(key => {
-            if (key.startsWith('x-amz-meta-')) {
-                metaHeaders[key] = data[key];
-            }
-        });
-        if (Object.keys(metaHeaders).length > 0) {
-            return JSON.stringify(metaHeaders);
-        }
-        return undefined;
-    }
-
     /**
      * Set replication information
     *
@@ -1043,7 +868,6 @@ export default class ObjectMD {
         role: string;
         storageType?: string;
         dataStoreVersionId?: string;
-        isNFS?: boolean;
     }) {
         const {
             status,
@@ -1054,7 +878,6 @@ export default class ObjectMD {
             role,
             storageType,
             dataStoreVersionId,
-            isNFS,
         } = replicationInfo;
         this._data.replicationInfo = {
             status,
@@ -1065,7 +888,6 @@ export default class ObjectMD {
             role,
             storageType: storageType || '',
             dataStoreVersionId: dataStoreVersionId || '',
-            isNFS: isNFS || null,
         };
         return this;
     }
@@ -1084,24 +906,6 @@ export default class ObjectMD {
         return this;
     }

-    /**
-     * Set whether the replication is occurring from an NFS bucket.
-     * @param isNFS - Whether replication from an NFS bucket
-     * @return itself
-     */
-    setReplicationIsNFS(isNFS: boolean) {
-        this._data.replicationInfo.isNFS = isNFS;
-        return this;
-    }
-
-    /**
-     * Get whether the replication is occurring from an NFS bucket.
-     * @return Whether replication from an NFS bucket
-     */
-    getReplicationIsNFS() {
-        return this._data.replicationInfo.isNFS;
-    }
-
     setReplicationSiteStatus(site: string, status: string) {
         const backend = this._data.replicationInfo.backends.find(
             (o) => o.site === site
@@ -1152,11 +956,6 @@ export default class ObjectMD {
         return this;
     }

-    setReplicationStorageType(storageType: string) {
-        this._data.replicationInfo.storageType = storageType;
-        return this;
-    }
-
     setReplicationStorageClass(storageClass: string) {
         this._data.replicationInfo.storageClass = storageClass;
         return this;
@@ -1238,9 +1037,6 @@ export default class ObjectMD {
         Object.keys(metaHeaders).forEach((key) => {
             if (key.startsWith('x-amz-meta-')) {
                 this._data[key] = metaHeaders[key];
-            } else if (key.startsWith('x-ms-meta-')) {
-                const _key = key.replace('x-ms-meta-', 'x-amz-meta-');
-                this._data[_key] = metaHeaders[key];
             }
         });
         // If a multipart object and the acl is already parsed, we update it
@@ -1250,20 +1046,6 @@ export default class ObjectMD {
         return this;
     }

-    /**
-     * Clear all existing meta headers (used for Azure)
-     *
-     * @return itself
-     */
-    clearMetadataValues() {
-        Object.keys(this._data).forEach(key => {
-            if (key.startsWith('x-amz-meta')) {
-                delete this._data[key];
-            }
-        });
-        return this;
-    }
-
     /**
      * overrideMetadataValues (used for complete MPU and object copy)
     *
@@ -1275,38 +1057,6 @@ export default class ObjectMD {
         return this;
     }

-    /**
-     * Create or update the microVersionId field
-     *
-     * This field can be used to force an update in MongoDB. This can
-     * be needed in the following cases:
-     *
-     * - in case no other metadata field changes
-     *
-     * - to detect a change when fields change but object version does
-     *   not change e.g. when ingesting a putObjectTagging coming from
-     *   S3C to Zenko
-     *
-     * - to manage conflicts during concurrent updates, using
-     *   conditions on the microVersionId field.
-     *
-     * It's a field of 16 hexadecimal characters randomly generated
-     *
-     * @return itself
-     */
-    updateMicroVersionId() {
-        this._data.microVersionId = crypto.randomBytes(8).toString('hex');
-    }
-
-    /**
-     * Get the microVersionId field, or null if not set
-     *
-     * @return the microVersionId field if exists, or {null} if it does not exist
-     */
-    getMicroVersionId() {
-        return this._data.microVersionId || null;
-    }
-
     /**
      * Set object legal hold status
      * @param legalHold - true if legal hold is 'ON' false if 'OFF'
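For context on what is dropped here: the removed `updateMicroVersionId` drew its 16 hexadecimal characters from 8 random bytes via Node's crypto module, which is why the `import * as crypto from 'crypto'` line disappears at the top of this file. The generation itself is a one-liner (sketch):

    import * as crypto from 'crypto';

    // 8 random bytes, hex-encoded -> 16 hexadecimal characters,
    // e.g. 'a3f09c1e5b7d2468' (value varies per call).
    const microVersionId = crypto.randomBytes(8).toString('hex');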
@@ -1387,98 +1137,4 @@ export default class ObjectMD {
     getValue() {
         return this._data;
     }
-
-    /**
-     * Get x-amz-restore
-     *
-     * @returns x-amz-restore
-     */
-    getAmzRestore() {
-        return this._data['x-amz-restore'];
-    }
-
-    /**
-     * Set x-amz-restore
-     *
-     * @param value x-amz-restore object
-     * @returns itself
-     * @throws case of invalid parameter
-     */
-    setAmzRestore(value?: ObjectMDAmzRestore) {
-        if (value) {
-            // Accept object instance of ObjectMDAmzRestore and Object
-            if (!(value instanceof ObjectMDAmzRestore) && !ObjectMDAmzRestore.isValid(value)) {
-                throw new Error('x-amz-restore must be type of ObjectMDAmzRestore.');
-            }
-            this._data['x-amz-restore'] = value;
-        } else {
-            delete this._data['x-amz-restore'];
-        }
-        return this;
-    }
-
-    /**
-     * Get archive
-     *
-     * @returns archive
-     */
-    getArchive() {
-        return this._data.archive;
-    }
-
-    /**
-     * Set archive
-     *
-     * @param value archive object
-     * @returns itself
-     * @throws case of invalid parameter
-     */
-    setArchive(value: ObjectMDArchive) {
-        if (value) {
-            // Accept object instance of ObjectMDArchive and Object
-            if (!(value instanceof ObjectMDArchive) && !ObjectMDArchive.isValid(value)) {
-                throw new Error('archive is must be type of ObjectMDArchive.');
-            }
-            this._data.archive = value;
-        } else {
-            delete this._data.archive;
-        }
-        return this;
-    }
-
-    /**
-     * Set deleted flag
-     * @param {Boolean} value deleted object
-     * @return {ObjectMD}
-     */
-    setDeleted(value) {
-        this._data.deleted = value;
-        return this;
-    }
-
-    /**
-     * Get deleted flag
-     * @return {Boolean}
-     */
-    getDeleted() {
-        return this._data.deleted;
-    }
-
-    /**
-     * Set isPHD flag
-     * @param {Boolean} value isPHD value
-     * @return {ObjectMD}
-     */
-    setIsPHD(value) {
-        this._data.isPHD = value;
-        return this;
-    }
-
-    /**
-     * Get isPHD flag
-     * @return {Boolean}
-     */
-    getIsPHD() {
-        return this._data.isPHD;
-    }
 }

@@ -1,94 +0,0 @@
-/*
- * Code based on Yutaka Oishi (Fujifilm) contributions
- * Date: 11 Sep 2020
- */
-
-/**
- * class representing the x-amz-restore of object metadata.
- *
- * @class
- */
-export default class ObjectMDAmzRestore {
-    'expiry-date': Date | string;
-    'ongoing-request': boolean;
-
-    /**
-     *
-     * @constructor
-     * @param ongoingRequest ongoing-request
-     * @param [expiryDate] expiry-date
-     * @throws case of invalid parameter
-     */
-    constructor(ongoingRequest: boolean, expiryDate?: Date | string) {
-        this.setOngoingRequest(ongoingRequest);
-        this.setExpiryDate(expiryDate);
-    }
-
-    /**
-     *
-     * @param data archiveInfo
-     * @returns true if the provided object is valid
-     */
-    static isValid(data: { 'ongoing-request': boolean; 'expiry-date': Date | string }) {
-        try {
-            // eslint-disable-next-line no-new
-            new ObjectMDAmzRestore(data['ongoing-request'], data['expiry-date']);
-            return true;
-        } catch (err) {
-            return false;
-        }
-    }
-
-    /**
-     *
-     * @returns ongoing-request
-     */
-    getOngoingRequest() {
-        return this['ongoing-request'];
-    }
-
-    /**
-     *
-     * @param value ongoing-request
-     * @throws case of invalid parameter
-     */
-    setOngoingRequest(value?: boolean) {
-        if (value === undefined) {
-            throw new Error('ongoing-request is required.');
-        } else if (typeof value !== 'boolean') {
-            throw new Error('ongoing-request must be type of boolean.');
-        }
-        this['ongoing-request'] = value;
-    }
-
-    /**
-     *
-     * @returns expiry-date
-     */
-    getExpiryDate() {
-        return this['expiry-date'];
-    }
-
-    /**
-     *
-     * @param value expiry-date
-     * @throws case of invalid parameter
-     */
-    setExpiryDate(value?: Date | string) {
-        if (value) {
-            const checkWith = (new Date(value)).getTime();
-            if (Number.isNaN(Number(checkWith))) {
-                throw new Error('expiry-date is must be a valid Date.');
-            }
-            this['expiry-date'] = value;
-        }
-    }
-
-    /**
-     *
-     * @returns itself
-     */
-    getValue() {
-        return this;
-    }
-}

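Since the whole class is deleted, a short usage sketch of the old contract may help reviewers: the constructor did the validation, so `isValid` was just a try/catch around `new`. (The import path is the pre-removal one; the values are illustrative.)

    import ObjectMDAmzRestore from './ObjectMDAmzRestore';

    // ongoing-request must be a boolean; expiry-date, when given,
    // must parse as a valid Date.
    const restore = new ObjectMDAmzRestore(true, '2020-09-11T00:00:00Z');
    restore.getOngoingRequest(); // => true

    // isValid() routes through the same constructor checks:
    ObjectMDAmzRestore.isValid({
        'ongoing-request': 'yes' as any,
        'expiry-date': '2020-09-11',
    }); // => false (the non-boolean ongoing-request throws inside)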
@@ -1,184 +0,0 @@
-/**
- * class representing the archive of object metadata.
- *
- * @class
- */
-export default class ObjectMDArchive {
-    archiveInfo: any;
-    // @ts-ignore
-    restoreRequestedAt: Date | string;
-    // @ts-ignore
-    restoreRequestedDays: number;
-    // @ts-ignore
-    restoreCompletedAt: Date | string;
-    // @ts-ignore
-    restoreWillExpireAt: Date | string;
-
-    /**
-     *
-     * @constructor
-     * @param archiveInfo contains the archive info set by the TLP and returned by the TLP jobs
-     * @param [restoreRequestedAt] set at the time restore request is made by the client
-     * @param [restoreRequestedDays] set at the time restore request is made by the client
-     * @param [restoreCompletedAt] set at the time of successful restore
-     * @param [restoreWillExpireAt] computed and stored at the time of restore
-     * @throws case of invalid parameter
-     */
-    constructor(
-        archiveInfo: any,
-        restoreRequestedAt?: Date | string,
-        restoreRequestedDays?: number,
-        restoreCompletedAt?: Date | string,
-        restoreWillExpireAt?: Date | string,
-    ) {
-        this.setArchiveInfo(archiveInfo);
-        this.setRestoreRequestedAt(restoreRequestedAt!);
-        this.setRestoreRequestedDays(restoreRequestedDays!);
-        this.setRestoreCompletedAt(restoreCompletedAt!);
-        this.setRestoreWillExpireAt(restoreWillExpireAt!);
-    }
-
-    /**
-     *
-     * @param data archiveInfo
-     * @returns true if the provided object is valid
-     */
-    static isValid(data: {
-        archiveInfo: any;
-        restoreRequestedAt?: Date;
-        restoreRequestedDays?: number;
-        restoreCompletedAt?: Date;
-        restoreWillExpireAt?: Date;
-    }) {
-        try {
-            // eslint-disable-next-line no-new
-            new ObjectMDArchive(
-                data.archiveInfo,
-                data.restoreRequestedAt,
-                data.restoreRequestedDays,
-                data.restoreCompletedAt,
-                data.restoreWillExpireAt,
-            );
-            return true;
-        } catch (err) {
-            return false;
-        }
-    }
-
-    /**
-     *
-     * @returns archiveInfo
-     */
-    getArchiveInfo() {
-        return this.archiveInfo;
-    }
-
-    /**
-     * @param value archiveInfo
-     * @throws case of invalid parameter
-     */
-    setArchiveInfo(value: any) {
-        if (!value) {
-            throw new Error('archiveInfo is required.');
-        } else if (typeof value !== 'object') {
-            throw new Error('archiveInfo must be type of object.');
-        }
-        this.archiveInfo = value;
-    }
-
-    /**
-     *
-     * @returns restoreRequestedAt
-     */
-    getRestoreRequestedAt() {
-        return this.restoreRequestedAt;
-    }
-    /**
-     * @param value restoreRequestedAt
-     * @throws case of invalid parameter
-     */
-    setRestoreRequestedAt(value: Date | string) {
-        if (value) {
-            const checkWith = (new Date(value)).getTime();
-            if (Number.isNaN(Number(checkWith))) {
-                throw new Error('restoreRequestedAt must be a valid Date.');
-            }
-            this.restoreRequestedAt = value;
-        }
-    }
-
-    /**
-     *
-     * @returns restoreRequestedDays
-     */
-    getRestoreRequestedDays() {
-        return this.restoreRequestedDays;
-    }
-    /**
-     * @param value restoreRequestedDays
-     * @throws case of invalid parameter
-     */
-    setRestoreRequestedDays(value: number) {
-        if (value) {
-            if (isNaN(value)) {
-                throw new Error('restoreRequestedDays must be type of Number.');
-            }
-            this.restoreRequestedDays = value;
-        }
-    }
-
-    /**
-     *
-     * @returns restoreCompletedAt
-     */
-    getRestoreCompletedAt() {
-        return this.restoreCompletedAt;
-    }
-    /**
-     * @param value restoreCompletedAt
-     * @throws case of invalid parameter
-     */
-    setRestoreCompletedAt(value: Date | string) {
-        if (value) {
-            if (!this.restoreRequestedAt || !this.restoreRequestedDays) {
-                throw new Error('restoreCompletedAt must be set after restoreRequestedAt and restoreRequestedDays.');
-            }
-            const checkWith = (new Date(value)).getTime();
-            if (Number.isNaN(Number(checkWith))) {
-                throw new Error('restoreCompletedAt must be a valid Date.');
-            }
-            this.restoreCompletedAt = value;
-        }
-    }
-    /**
-     *
-     * @returns restoreWillExpireAt
-     */
-    getRestoreWillExpireAt() {
-        return this.restoreWillExpireAt;
-    }
-    /**
-     * @param value restoreWillExpireAt
-     * @throws case of invalid parameter
-     */
-    setRestoreWillExpireAt(value: Date | string) {
-        if (value) {
-            if (!this.restoreRequestedAt || !this.restoreRequestedDays) {
-                throw new Error('restoreWillExpireAt must be set after restoreRequestedAt and restoreRequestedDays.');
-            }
-            const checkWith = (new Date(value)).getTime();
-            if (Number.isNaN(Number(checkWith))) {
-                throw new Error('restoreWillExpireAt must be a valid Date.');
-            }
-            this.restoreWillExpireAt = value;
-        }
-    }
-
-    /**
-     *
-     * @returns itself
-     */
-    getValue() {
-        return this;
-    }
-}

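As with the previous file, a usage sketch of the deleted contract: ObjectMDArchive enforced an ordering constraint, allowing the completion and expiry timestamps only once both `restoreRequestedAt` and `restoreRequestedDays` were set. (The import path is the pre-removal one; the values are illustrative.)

    import ObjectMDArchive from './ObjectMDArchive';

    const archive = new ObjectMDArchive(
        { location: 'cold-storage' }, // archiveInfo: any non-null object
        '2022-01-01T00:00:00Z',       // restoreRequestedAt
        7,                            // restoreRequestedDays
    );
    // Allowed now that requestedAt/requestedDays exist:
    archive.setRestoreCompletedAt('2022-01-02T00:00:00Z');

    // Throws 'archiveInfo must be type of object.':
    // new ObjectMDArchive('not-an-object' as any);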
@@ -1,188 +0,0 @@
-/**
- * Helper class to ease access to the Azure specific information for
- * Blob and Container objects.
- */
-export default class ObjectMDAzureInfo {
-    _data: {
-        containerPublicAccess: string;
-        containerStoredAccessPolicies: any[];
-        containerImmutabilityPolicy: any;
-        containerLegalHoldStatus: boolean;
-        containerDeletionInProgress: boolean;
-        blobType: string;
-        blobContentMD5: string;
-        blobIssuedETag: string;
-        blobCopyInfo: any;
-        blobSequenceNumber: number;
-        blobAccessTierChangeTime: Date;
-        blobUncommitted: boolean;
-    };
-
-    /**
-     * @constructor
-     * @param obj - Raw structure for the Azure info on Blob/Container
-     * @param obj.containerPublicAccess - Public access authorization
-     * type
-     * @param obj.containerStoredAccessPolicies - Access policies
-     * for Shared Access Signature bearer
-     * @param obj.containerImmutabilityPolicy - data immutability
-     * policy for this container
-     * @param obj.containerLegalHoldStatus - legal hold status for
-     * this container
-     * @param obj.containerDeletionInProgress - deletion in progress
-     * indicator for this container
-     * @param obj.blobType - defines the type of blob for this object
-     * @param obj.blobContentMD5 - whole object MD5 sum set by the
-     * client through the Azure API
-     * @param obj.blobIssuedETag - backup of the issued ETag on MD only
-     * operations like Set Blob Properties and Set Blob Metadata
-     * @param obj.blobCopyInfo - information pertaining to past and
-     * pending copy operations targeting this object
-     * @param obj.blobSequenceNumber - sequence number for a PageBlob
-     * @param obj.blobAccessTierChangeTime - date of change of tier
-     * @param obj.blobUncommitted - A block has been put for a
-     * nonexistent blob which is about to be created
-     */
-    constructor(obj: {
-        containerPublicAccess: string;
-        containerStoredAccessPolicies: any[];
-        containerImmutabilityPolicy: any;
-        containerLegalHoldStatus: boolean;
-        containerDeletionInProgress: boolean;
-        blobType: string;
-        blobContentMD5: string;
-        blobIssuedETag: string;
-        blobCopyInfo: any;
-        blobSequenceNumber: number;
-        blobAccessTierChangeTime: Date;
-        blobUncommitted: boolean;
-    }) {
-        this._data = {
-            containerPublicAccess: obj.containerPublicAccess,
-            containerStoredAccessPolicies: obj.containerStoredAccessPolicies,
-            containerImmutabilityPolicy: obj.containerImmutabilityPolicy,
-            containerLegalHoldStatus: obj.containerLegalHoldStatus,
-            containerDeletionInProgress: obj.containerDeletionInProgress,
-            blobType: obj.blobType,
-            blobContentMD5: obj.blobContentMD5,
-            blobIssuedETag: obj.blobIssuedETag,
-            blobCopyInfo: obj.blobCopyInfo,
-            blobSequenceNumber: obj.blobSequenceNumber,
-            blobAccessTierChangeTime: obj.blobAccessTierChangeTime,
-            blobUncommitted: obj.blobUncommitted,
-        };
-    }
-
-    getContainerPublicAccess() {
-        return this._data.containerPublicAccess;
-    }
-
-    setContainerPublicAccess(containerPublicAccess: string) {
-        this._data.containerPublicAccess = containerPublicAccess;
-        return this;
-    }
-
-    getContainerStoredAccessPolicies() {
-        return this._data.containerStoredAccessPolicies;
-    }
-
-    setContainerStoredAccessPolicies(containerStoredAccessPolicies: any[]) {
-        this._data.containerStoredAccessPolicies =
-            containerStoredAccessPolicies;
-        return this;
-    }
-
-    getContainerImmutabilityPolicy() {
-        return this._data.containerImmutabilityPolicy;
-    }
-
-    setContainerImmutabilityPolicy(containerImmutabilityPolicy: any) {
-        this._data.containerImmutabilityPolicy = containerImmutabilityPolicy;
-        return this;
-    }
-
-    getContainerLegalHoldStatus() {
-        return this._data.containerLegalHoldStatus;
-    }
-
-    setContainerLegalHoldStatus(containerLegalHoldStatus: boolean) {
-        this._data.containerLegalHoldStatus = containerLegalHoldStatus;
-        return this;
-    }
-
-    getContainerDeletionInProgress() {
-        return this._data.containerDeletionInProgress;
-    }
-
-    setContainerDeletionInProgress(containerDeletionInProgress: boolean) {
-        this._data.containerDeletionInProgress = containerDeletionInProgress;
-        return this;
-    }
-
-    getBlobType() {
-        return this._data.blobType;
-    }
-
-    setBlobType(blobType: string) {
-        this._data.blobType = blobType;
-        return this;
-    }
-
-    getBlobContentMD5() {
-        return this._data.blobContentMD5;
-    }
-
-    setBlobContentMD5(blobContentMD5: string) {
-        this._data.blobContentMD5 = blobContentMD5;
-        return this;
-    }
-
-    getBlobIssuedETag() {
-        return this._data.blobIssuedETag;
-    }
-
-    setBlobIssuedETag(blobIssuedETag: string) {
-        this._data.blobIssuedETag = blobIssuedETag;
-        return this;
-    }
-
-    getBlobCopyInfo() {
-        return this._data.blobCopyInfo;
-    }
-
-    setBlobCopyInfo(blobCopyInfo: any) {
-        this._data.blobCopyInfo = blobCopyInfo;
-        return this;
-    }
-
-    getBlobSequenceNumber() {
-        return this._data.blobSequenceNumber;
-    }
-
-    setBlobSequenceNumber(blobSequenceNumber: number) {
-        this._data.blobSequenceNumber = blobSequenceNumber;
-        return this;
-    }
-
-    getBlobAccessTierChangeTime() {
-        return this._data.blobAccessTierChangeTime;
-    }
-
-    setBlobAccessTierChangeTime(blobAccessTierChangeTime: Date) {
-        this._data.blobAccessTierChangeTime = blobAccessTierChangeTime;
-        return this;
-    }
-
-    getBlobUncommitted() {
-        return this._data.blobUncommitted;
-    }
-
-    setBlobUncommitted(blobUncommitted: boolean) {
-        this._data.blobUncommitted = blobUncommitted;
-        return this;
-    }
-
-    getValue() {
-        return this._data;
-    }
-}
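For reference, the class deleted above follows a chainable setter pattern over a single private `_data` bag. A minimal standalone sketch of that pattern (class and field names shortened for the example; this is not the Arsenal API itself):

// Sketch of the fluent accessor pattern used by ObjectMDAzureInfo: every
// setter mutates the private data bag and returns `this` so calls chain,
// and getValue() exposes the raw bag for serialization.
class AzureInfoSketch {
    private _data: { blobType: string; blobContentMD5: string };

    constructor(obj: { blobType: string; blobContentMD5: string }) {
        this._data = { ...obj };
    }

    setBlobType(blobType: string) {
        this._data.blobType = blobType;
        return this;
    }

    setBlobContentMD5(md5: string) {
        this._data.blobContentMD5 = md5;
        return this;
    }

    getValue() {
        return this._data;
    }
}

const info = new AzureInfoSketch({ blobType: 'BlockBlob', blobContentMD5: '' })
    .setBlobContentMD5('9b2cf535f27731c974343645a3985328');
console.log(info.getValue());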
@@ -5,7 +5,6 @@ export type Location = BaseLocation & {
     size: number;
     dataStoreETag: string;
     dataStoreVersionId: string;
-    blockId?: string;
 };
 export type ObjectMDLocationData = {
     key: string;
@@ -13,8 +12,6 @@ export type ObjectMDLocationData = {
     size: number;
     dataStoreName: string;
     dataStoreETag: string;
-    dataStoreVersionId: string;
-    blockId?: string;
     cryptoScheme?: number;
     cipheredDataKey?: string;
 };
@@ -34,14 +31,10 @@ export default class ObjectMDLocation {
      * @param locationObj.dataStoreName - type of data store
      * @param locationObj.dataStoreETag - internal ETag of
      * data part
-     * @param [locationObj.dataStoreVersionId] - versionId,
-     * needed for cloud backends
      * @param [location.cryptoScheme] - if location data is
      * encrypted: the encryption scheme version
      * @param [location.cipheredDataKey] - if location data
      * is encrypted: the base64-encoded ciphered data key
-     * @param [locationObj.blockId] - blockId of the part,
-     * set by the Azure Blob Service REST API frontend
      */
     constructor(locationObj: Location | (Location & Ciphered)) {
         this._data = {
@@ -50,8 +43,6 @@ export default class ObjectMDLocation {
             size: locationObj.size,
             dataStoreName: locationObj.dataStoreName,
             dataStoreETag: locationObj.dataStoreETag,
-            dataStoreVersionId: locationObj.dataStoreVersionId,
-            blockId: locationObj.blockId,
         };
         if ('cryptoScheme' in locationObj) {
             this._data.cryptoScheme = locationObj.cryptoScheme;
@@ -73,7 +64,6 @@ export default class ObjectMDLocation {
      * @param location - single data location info
      * @param location.key - data backend key
      * @param location.dataStoreName - type of data store
-     * @param [location.dataStoreVersionId] - data backend version ID
      * @param [location.cryptoScheme] - if location data is
      * encrypted: the encryption scheme version
      * @param [location.cipheredDataKey] - if location data
@@ -81,19 +71,15 @@ export default class ObjectMDLocation {
      * @return return this
      */
     setDataLocation(location: BaseLocation | (BaseLocation & Ciphered)) {
-        [
-            'key',
-            'dataStoreName',
-            'dataStoreVersionId',
-            'cryptoScheme',
-            'cipheredDataKey',
-        ].forEach(attrName => {
+        ['key', 'dataStoreName', 'cryptoScheme', 'cipheredDataKey'].forEach(
+            (attrName) => {
                 if (location[attrName] !== undefined) {
                     this._data[attrName] = location[attrName];
                 } else {
                     delete this._data[attrName];
                 }
-        });
+            }
+        );
         return this;
     }

@@ -101,10 +87,6 @@ export default class ObjectMDLocation {
         return this._data.dataStoreETag;
     }

-    getDataStoreVersionId() {
-        return this._data.dataStoreVersionId;
-    }
-
     getPartNumber() {
         return Number.parseInt(this._data.dataStoreETag.split(':')[0], 10);
     }
@@ -139,15 +121,6 @@ export default class ObjectMDLocation {
         return this._data.cipheredDataKey;
     }

-    getBlockId() {
-        return this._data.blockId;
-    }
-
-    setBlockId(blockId: string) {
-        this._data.blockId = blockId;
-        return this;
-    }
-
     getValue() {
         return this._data;
     }
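The getPartNumber() context line above relies on the internal dataStoreETag layout. A standalone sketch of that parsing (the "<partNumber>:<etag>" layout is inferred from the implementation shown in the hunk):

// Sketch of ObjectMDLocation.getPartNumber(): the internal dataStoreETag
// is assumed to be "<partNumber>:<etag>", so the part number is the
// integer before the first colon.
function partNumberOf(dataStoreETag: string): number {
    return Number.parseInt(dataStoreETag.split(':')[0], 10);
}

console.log(partNumberOf('2:9b2cf535f27731c974343645a3985328')); // 2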
@@ -1,8 +1,6 @@
 import assert from 'assert';
 import UUID from 'uuid';

-import { RequestLogger } from 'werelogs';
-
 import escapeForXml from '../s3middleware/escapeForXml';
 import errors from '../errors';
 import { isValidBucketName } from '../s3routes/routesUtils';
@@ -64,8 +62,7 @@ export default class ReplicationConfiguration {
     _destination: string | null;
     _rules: Rule[] | null;
     _prevStorageClass: null;
-    _hasScalityDestination: boolean | null;
-    _preferredReadLocation: string | null;
+    _hasScalityDestination: boolean;

     /**
      * Create a ReplicationConfiguration instance
@@ -87,8 +84,7 @@ export default class ReplicationConfiguration {
         this._destination = null;
         this._rules = null;
         this._prevStorageClass = null;
-        this._hasScalityDestination = null;
-        this._preferredReadLocation = null;
+        this._hasScalityDestination = false;
     }

     /**
@@ -115,18 +111,6 @@ export default class ReplicationConfiguration {
         return this._rules;
     }

-    /**
-     * The preferred read location
-     * @return {string|null} - The preferred read location if defined,
-     * otherwise null
-     *
-     * FIXME ideally we should be able to specify one preferred read
-     * location for each rule
-     */
-    getPreferredReadLocation() {
-        return this._preferredReadLocation;
-    }
-
     /**
      * Get the replication configuration
      * @return - The replication configuration
@@ -136,7 +120,6 @@ export default class ReplicationConfiguration {
             role: this.getRole(),
             destination: this.getDestination(),
             rules: this.getRules(),
-            preferredReadLocation: this.getPreferredReadLocation(),
         };
     }

@@ -343,15 +326,7 @@ export default class ReplicationConfiguration {
             return undefined;
         }
         const storageClasses = destination.StorageClass[0].split(',');
-        const prefReadIndex = storageClasses.findIndex(storageClass =>
-            storageClass.endsWith(':preferred_read'));
-        if (prefReadIndex !== -1) {
-            const prefRead = storageClasses[prefReadIndex].split(':')[0];
-            // remove :preferred_read tag from storage class name
-            storageClasses[prefReadIndex] = prefRead;
-            this._preferredReadLocation = prefRead;
-        }
-        const isValidStorageClass = storageClasses.every(storageClass => {
+        const isValidStorageClass = storageClasses.every((storageClass) => {
             if (validStorageClasses.includes(storageClass)) {
                 this._hasScalityDestination =
                     defaultEndpoint.type === undefined;
@@ -361,11 +336,6 @@ export default class ReplicationConfiguration {
                 (endpoint: any) => endpoint.site === storageClass
             );
             if (endpoint) {
-                // We do not support replication to cold location.
-                // Only transition to cold location is supported.
-                if (endpoint.site && this._config.locationConstraints[endpoint.site]?.isCold) {
-                    return false;
-                }
                 // If this._hasScalityDestination was not set to true in any
                 // previous iteration or by a prior rule's storage class, then
                 // check if the current endpoint is a Scality destination.
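The removed code above strips a ":preferred_read" suffix from one entry of the comma-separated StorageClass field. A standalone sketch of that convention (the sample storage class names are made up for the example):

// Sketch of the ":preferred_read" handling deleted in the hunk above: one
// storage class in the list may carry the suffix; it is stripped from the
// class name and remembered as the preferred read location.
function extractPreferredRead(storageClassField: string) {
    let preferredReadLocation: string | null = null;
    const cleaned = storageClassField.split(',').map(sc => {
        if (sc.endsWith(':preferred_read')) {
            preferredReadLocation = sc.split(':')[0];
            return preferredReadLocation;
        }
        return sc;
    });
    return { cleaned, preferredReadLocation };
}

console.log(extractPreferredRead('us-east-1,azure-backend:preferred_read'));
// { cleaned: ['us-east-1', 'azure-backend'],
//   preferredReadLocation: 'azure-backend' }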
@@ -1,16 +1,11 @@
 export { default as ARN } from './ARN';
-export { default as BackendInfo } from './BackendInfo';
-export { default as BucketAzureInfo } from './BucketAzureInfo';
 export { default as BucketInfo } from './BucketInfo';
-export { default as BucketPolicy } from './BucketPolicy';
+export { default as ObjectMD } from './ObjectMD';
+export { default as ObjectMDLocation } from './ObjectMDLocation';
+export * as WebsiteConfiguration from './WebsiteConfiguration';
+export { default as ReplicationConfiguration } from './ReplicationConfiguration';
 export { default as LifecycleConfiguration } from './LifecycleConfiguration';
 export { default as LifecycleRule } from './LifecycleRule';
-export { default as NotificationConfiguration } from './NotificationConfiguration';
+export { default as BucketPolicy } from './BucketPolicy';
 export { default as ObjectLockConfiguration } from './ObjectLockConfiguration';
-export { default as ObjectMD } from './ObjectMD';
-export { default as ObjectMDAmzRestore } from './ObjectMDAmzRestore';
-export { default as ObjectMDArchive } from './ObjectMDArchive';
-export { default as ObjectMDAzureInfo } from './ObjectMDAzureInfo';
-export { default as ObjectMDLocation } from './ObjectMDLocation';
-export { default as ReplicationConfiguration } from './ReplicationConfiguration';
-export * as WebsiteConfiguration from './WebsiteConfiguration';
+export { default as NotificationConfiguration } from './NotificationConfiguration';
@@ -1,6 +1,5 @@
 import * as http from 'http';
 import * as https from 'https';
-import { https as HttpsAgent } from 'httpagent';
 import * as tls from 'tls';
 import * as net from 'net';
 import assert from 'assert';
@@ -372,8 +371,6 @@ export default class Server {
                 error: err.stack || err,
                 address: sock.address(),
             });
-            // socket is not systematically destroyed
-            sock.destroy();
         }

     /**
@@ -410,11 +407,7 @@ export default class Server {
                 method: 'arsenal.network.Server.start',
                 port: this._port,
             });
-            this._https.agent = new HttpsAgent.Agent(this._https, {
-                // Do not enforce the maximum number of sockets for the
-                // main server, as it might be able to serve more clients.
-                maxSockets: false,
-            });
+            this._https.agent = new https.Agent(this._https);
             this._server = https.createServer(this._https,
                 (req, res) => this._onRequest(req, res));
         } else {
@@ -435,6 +428,7 @@ export default class Server {
         this._server.on('connection', sock => {
             // Setting no delay of the socket to the value configured
             // TODO fix this
+            // @ts-expect-errors
             sock.setNoDelay(this.isNoDelay());
             sock.on('error', err => this._logger.info(
                 'socket error - request rejected', { error: err }));
@@ -3,12 +3,10 @@ import * as utils from './http/utils';
 import RESTServer from './rest/RESTServer';
 import RESTClient from './rest/RESTClient';
 import * as ProbeServer from './probe/ProbeServer';
-import HealthProbeServer from './probe/HealthProbeServer';
-import * as Utils from './probe/Utils';

 export const http = { server, utils };
 export const rest = { RESTServer, RESTClient };
-export const probe = { ProbeServer, HealthProbeServer, Utils };
+export const probe = { ProbeServer };

 export { default as RoundRobin } from './RoundRobin';
 export { default as kmip } from './kmip';
@@ -20,7 +20,7 @@ function _ttlvPadVector(vec: any[]) {
     return vec;
 }

-function _throwError(logger: werelogs.Logger, msg: string, data?: LogDictionary) {
+function _throwError(logger: werelogs.Logger, msg: string, data?: LogDictionnary) {
     logger.error(msg, data);
     throw Error(msg);
 }
@@ -1,94 +0,0 @@
-import * as http from 'http';
-import httpServer from '../http/server';
-import * as werelogs from 'werelogs';
-import errors from '../../errors';
-import ZenkoMetrics from '../../metrics/ZenkoMetrics';
-import { sendSuccess, sendError } from './Utils';
-
-function checkStub(_log: any) {
-    // eslint-disable-line
-    return true;
-}
-
-export default class HealthProbeServer extends httpServer {
-    logging: werelogs.Logger;
-    _reqHandlers: { [key: string]: any };
-    _livenessCheck: (log: any) => boolean;
-    _readinessCheck: (log: any) => boolean;
-
-    constructor(params: {
-        port: number;
-        bindAddress: string;
-        livenessCheck?: (log: any) => boolean;
-        readinessCheck?: (log: any) => boolean;
-    }) {
-        const logging = new werelogs.Logger('HealthProbeServer');
-        super(params.port, logging);
-        this.logging = logging;
-        this.setBindAddress(params.bindAddress || 'localhost');
-        // hooking our request processing function by calling the
-        // parent's method for that
-        this.onRequest(this._onRequest);
-        this._reqHandlers = {
-            '/_/health/liveness': this._onLiveness.bind(this),
-            '/_/health/readiness': this._onReadiness.bind(this),
-            '/_/monitoring/metrics': this._onMetrics.bind(this),
-        };
-        this._livenessCheck = params.livenessCheck || checkStub;
-        this._readinessCheck = params.readinessCheck || checkStub;
-    }
-
-    onLiveCheck(f: (log: any) => boolean) {
-        this._livenessCheck = f;
-    }
-
-    onReadyCheck(f: (log: any) => boolean) {
-        this._readinessCheck = f;
-    }
-
-    _onRequest(req: http.IncomingMessage, res: http.ServerResponse) {
-        const log = this.logging.newRequestLogger();
-        log.debug('request received', { method: req.method, url: req.url });
-
-        if (req.method !== 'GET') {
-            sendError(res, log, errors.MethodNotAllowed);
-        } else if (req.url && req.url in this._reqHandlers) {
-            this._reqHandlers[req.url](req, res, log);
-        } else {
-            sendError(res, log, errors.InvalidURI);
-        }
-    }
-
-    _onLiveness(
-        _req: http.IncomingMessage,
-        res: http.ServerResponse,
-        log: werelogs.RequestLogger,
-    ) {
-        if (this._livenessCheck(log)) {
-            sendSuccess(res, log);
-        } else {
-            sendError(res, log, errors.ServiceUnavailable);
-        }
-    }
-
-    _onReadiness(
-        _req: http.IncomingMessage,
-        res: http.ServerResponse,
-        log: werelogs.RequestLogger,
-    ) {
-        if (this._readinessCheck(log)) {
-            sendSuccess(res, log);
-        } else {
-            sendError(res, log, errors.ServiceUnavailable);
-        }
-    }
-
-    // expose metrics to Prometheus
-    async _onMetrics(_req: http.IncomingMessage, res: http.ServerResponse) {
-        const metrics = await ZenkoMetrics.asPrometheus();
-        res.writeHead(200, {
-            'Content-Type': ZenkoMetrics.asPrometheusContentType(),
-        });
-        res.end(metrics);
-    }
-}
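For context, a minimal sketch of how the deleted server above was typically wired (the constructor and onLiveCheck signatures come from the file; the port, bind address, and dbConnected flag are made up for the example):

// Hypothetical wiring for the deleted HealthProbeServer: a readiness check
// tracking an external dependency. The class is declared rather than
// imported, since the module is removed on this branch.
declare class HealthProbeServer {
    constructor(params: {
        port: number;
        bindAddress: string;
        readinessCheck?: (log: any) => boolean;
    });
    onLiveCheck(f: (log: any) => boolean): void;
}

let dbConnected = false;
const probe = new HealthProbeServer({
    port: 8000,
    bindAddress: '0.0.0.0',
    readinessCheck: () => dbConnected, // 503 until the dependency is up
});
probe.onLiveCheck(() => true); // process is alive as long as it responds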
@@ -4,19 +4,22 @@ import * as werelogs from 'werelogs';
 import errors from '../../errors';

 export const DEFAULT_LIVE_ROUTE = '/_/live';
-export const DEFAULT_READY_ROUTE = '/_/ready';
-export const DEFAULT_METRICS_ROUTE = '/metrics';
+export const DEFAULT_READY_ROUTE = '/_/live';
+export const DEFAULT_METRICS_ROUTE = '/_/metrics';

 /**
- * ProbeDelegate is used to handle probe checks.
- * You can sendSuccess and sendError from Utils to handle success
- * and failure conditions.
+ * ProbeDelegate is used to determine if a probe is successful or
+ * if any errors are present.
+ * If everything is working as intended, it is a no-op.
+ * Otherwise, return a string representing what is failing.
  * @callback ProbeDelegate
  * @param res - HTTP response for writing
  * @param log - Werelogs instance for logging if you choose to
+ * @return String representing issues to report. An empty
+ * string or undefined is used to represent no issues.
  */

-export type ProbeDelegate = (res: http.ServerResponse, log: werelogs.RequestLogger) => string | void
+export type ProbeDelegate = (res: http.ServerResponse, log: RequestLogger) => string | void

 export type ProbeServerParams = {
     port: number;
@@ -87,6 +90,12 @@ export class ProbeServer extends httpServer {
             return;
         }

-        this._handlers.get(req.url ?? '')?.(res, log);
+        const probeResponse = this._handlers.get(req.url!)!(res, log);
+        if (probeResponse !== undefined && probeResponse !== '') {
+            // Return an internal error with the response
+            errors.InternalError
+                .customizeDescription(probeResponse)
+                .writeResponse(res);
+        }
     }
 }
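To illustrate the contract described by the hotfix-side ProbeDelegate comment above, a hypothetical delegate (the backendUp flag and the 200/OK success handling are assumptions for the example, not part of the hunk):

import * as http from 'http';

// Hypothetical probe delegate: report a failure by returning a string
// (the ProbeServer above wraps it in an InternalError response),
// otherwise write the success response directly.
let backendUp = true;

function readinessDelegate(res: http.ServerResponse): string | void {
    if (!backendUp) {
        return 'backend connection is down';
    }
    res.writeHead(200);
    res.end('OK');
}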
@@ -1,49 +0,0 @@
-import * as http from 'http';
-
-import { RequestLogger } from 'werelogs';
-
-import { ArsenalError } from '../../errors';
-
-/**
- * Send a successful HTTP response of 200 OK
- * @param res - HTTP response for writing
- * @param log - Werelogs instance for logging if you choose to
- * @param [message] - Message to send as response, defaults to OK
- */
-export function sendSuccess(
-    res: http.ServerResponse,
-    log: RequestLogger,
-    message = 'OK'
-) {
-    log.debug('replying with success');
-    res.writeHead(200);
-    res.end(message);
-}
-
-/**
- * Send an Arsenal Error response
- * @param res - HTTP response for writing
- * @param log - Werelogs instance for logging if you choose to
- * @param error - Error to send back to the user
- * @param [optMessage] - Message to use instead of the errors message
- */
-export function sendError(
-    res: http.ServerResponse,
-    log: RequestLogger,
-    error: ArsenalError,
-    optMessage?: string
-) {
-    const message = optMessage || error.description || '';
-    log.debug('sending back error response', {
-        httpCode: error.code,
-        errorType: error.message,
-        error: message,
-    });
-    res.writeHead(error.code);
-    res.end(
-        JSON.stringify({
-            errorType: error.message,
-            errorMessage: message,
-        })
-    );
-}
@@ -4,7 +4,7 @@ import * as werelogs from 'werelogs';
 import * as constants from '../../constants';
 import * as utils from './utils';
 import errors, { ArsenalError } from '../../errors';
-import { http as HttpAgent } from 'httpagent';
+import HttpAgent from 'agentkeepalive';
 import * as stream from 'stream';

 function setRequestUids(reqHeaders: http.IncomingHttpHeaders, reqUids: string) {
@@ -71,9 +71,8 @@ function makeErrorFromHTTPResponse(response: http.IncomingMessage) {
 export default class RESTClient {
     host: string;
     port: number;
-    httpAgent: http.Agent;
+    httpAgent: HttpAgent;
     logging: werelogs.Logger;
-    isPassthrough: boolean;

     /**
      * Interface to the data file server
@@ -89,19 +88,17 @@ export default class RESTClient {
         host: string;
         port: number;
         logApi: { Logger: typeof werelogs.Logger };
-        isPassthrough?: boolean;
     }) {
         assert(params.host);
         assert(params.port);

         this.host = params.host;
         this.port = params.port;
-        this.isPassthrough = params.isPassthrough || false;
         this.logging = new (params.logApi || werelogs).Logger('DataFileRESTClient');
-        this.httpAgent = new HttpAgent.Agent({
+        this.httpAgent = new HttpAgent({
             keepAlive: true,
             freeSocketTimeout: constants.httpClientFreeSocketTimeout,
-        }) as http.Agent;
+        });
     }

     /** Destroy the HTTP agent, forcing a close of the remaining open connections */
@@ -119,18 +116,16 @@ export default class RESTClient {
         method: string,
         headers: http.OutgoingHttpHeaders | null,
         key: string | null,
-        log: werelogs.RequestLogger,
+        log: RequestLogger,
         responseCb: (res: http.IncomingMessage) => void,
     ) {
         const reqHeaders = headers || {};
         const urlKey = key || '';
-        const prefix = this.isPassthrough ?
-            constants.passthroughFileURL : constants.dataFileURL;
         const reqParams = {
             hostname: this.host,
             port: this.port,
             method,
-            path: encodeURI(`${prefix}/${urlKey}`),
+            path: `${constants.dataFileURL}/${urlKey}`,
             headers: reqHeaders,
             agent: this.httpAgent,
         };
@@ -4,7 +4,7 @@ import * as werelogs from 'werelogs';
 import * as http from 'http';
 import httpServer from '../http/server';
 import * as constants from '../../constants';
-import { parseURL } from './utils';
+import * as utils from './utils';
 import * as httpUtils from '../http/utils';
 import errors, { ArsenalError } from '../../errors';

@@ -25,7 +25,7 @@ function setContentRange(

 function sendError(
     res: http.ServerResponse,
-    log: werelogs.RequestLogger,
+    log: RequestLogger,
     error: ArsenalError,
     optMessage?: string,
 ) {
@@ -38,6 +38,42 @@ function sendError(
         errorMessage: message })}\n`);
 }

+/**
+ * Parse the given url and return a pathInfo object. Sanity checks are
+ * performed.
+ *
+ * @param urlStr - URL to parse
+ * @param expectKey - whether the command expects to see a
+ *   key in the URL
+ * @return a pathInfo object with URL items containing the
+ * following attributes:
+ *   - pathInfo.service {String} - The name of REST service ("DataFile")
+ *   - pathInfo.key {String} - The requested key
+ */
+function parseURL(urlStr: string, expectKey: boolean) {
+    const urlObj = url.parse(urlStr);
+    const pathInfo = utils.explodePath(urlObj.path!);
+    if (pathInfo.service !== constants.dataFileURL) {
+        throw errors.InvalidAction.customizeDescription(
+            `unsupported service '${pathInfo.service}'`);
+    }
+    if (expectKey && pathInfo.key === undefined) {
+        throw errors.MissingParameter.customizeDescription(
+            'URL is missing key');
+    }
+    if (!expectKey && pathInfo.key !== undefined) {
+        // note: we may implement rewrite functionality by allowing a
+        // key in the URL, though we may still provide the new key in
+        // the Location header to keep immutability property and
+        // atomicity of the update (we would just remove the old
+        // object when the new one has been written entirely in this
+        // case, saving a request over an equivalent PUT + DELETE).
+        throw errors.InvalidURI.customizeDescription(
+            'PUT url cannot contain a key');
+    }
+    return pathInfo;
+}
+
 /**
  * @class
  * @classdesc REST Server interface
@@ -68,6 +104,7 @@ export default class RESTServer extends httpServer {
     }) {
         assert(params.port);

+        // @ts-expect-error
         werelogs.configure({
             level: params.log.logLevel,
             dump: params.log.dumpLevel,
@@ -141,7 +178,7 @@ export default class RESTServer extends httpServer {
     _onPut(
         req: http.IncomingMessage,
         res: http.ServerResponse,
-        log: werelogs.RequestLogger,
+        log: RequestLogger,
     ) {
         let size: number;
         try {
@@ -183,7 +220,7 @@ export default class RESTServer extends httpServer {
     _onGet(
         req: http.IncomingMessage,
         res: http.ServerResponse,
-        log: werelogs.RequestLogger,
+        log: RequestLogger,
     ) {
         let pathInfo: ReturnType<typeof parseURL>;
         let rangeSpec: ReturnType<typeof httpUtils.parseRangeSpec> | undefined =
@@ -266,7 +303,7 @@ export default class RESTServer extends httpServer {
     _onDelete(
         req: http.IncomingMessage,
         res: http.ServerResponse,
-        log: werelogs.RequestLogger,
+        log: RequestLogger,
     ) {
         let pathInfo: ReturnType<typeof parseURL>;
         try {
@@ -1,16 +1,6 @@
 import errors from '../../errors';
-import * as constants from '../../constants';
-import * as url from 'url';
-
-const passthroughPrefixLength = constants.passthroughFileURL.length;

 export function explodePath(path: string) {
-    if (path.startsWith(constants.passthroughFileURL)) {
-        const key = path.slice(passthroughPrefixLength + 1);
-        return {
-            service: constants.passthroughFileURL,
-            key: key.length > 0 ? key : undefined,
-        };
-    }
     const pathMatch = /^(\/[a-zA-Z0-9]+)(\/([0-9a-f]*))?$/.exec(path);
     if (pathMatch) {
         return {
@@ -20,41 +10,4 @@ export function explodePath(path: string) {
         };
     }
     throw errors.InvalidURI.customizeDescription('malformed URI');
-}
-
-/**
- * Parse the given url and return a pathInfo object. Sanity checks are
- * performed.
- *
- * @param urlStr - URL to parse
- * @param expectKey - whether the command expects to see a
- *   key in the URL
- * @return a pathInfo object with URL items containing the
- * following attributes:
- *   - pathInfo.service {String} - The name of REST service ("DataFile")
- *   - pathInfo.key {String} - The requested key
- */
-export function parseURL(urlStr: string, expectKey: boolean) {
-    const urlObj = url.parse(urlStr);
-    const pathInfo = explodePath(decodeURI(urlObj.path!));
-    if ((pathInfo.service !== constants.dataFileURL)
-        && (pathInfo.service !== constants.passthroughFileURL)) {
-        throw errors.InvalidAction.customizeDescription(
-            `unsupported service '${pathInfo.service}'`);
-    }
-    if (expectKey && pathInfo.key === undefined) {
-        throw errors.MissingParameter.customizeDescription(
-            'URL is missing key');
-    }
-    if (!expectKey && pathInfo.key !== undefined) {
-        // note: we may implement rewrite functionality by allowing a
-        // key in the URL, though we may still provide the new key in
-        // the Location header to keep immutability property and
-        // atomicity of the update (we would just remove the old
-        // object when the new one has been written entirely in this
-        // case, saving a request over an equivalent PUT + DELETE).
-        throw errors.InvalidURI.customizeDescription(
-            'PUT url cannot contain a key');
-    }
-    return pathInfo;
-}
+};
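A quick standalone check of the explodePath() regex retained on both sides of the hunk above: the service is a single alphanumeric path component and the key an optional hexadecimal string ('/DataFile/0123abcd' is a sample input):

// The same pattern used by explodePath(): group 1 is the service,
// group 3 the optional hex key.
const pathMatch = /^(\/[a-zA-Z0-9]+)(\/([0-9a-f]*))?$/.exec('/DataFile/0123abcd');
console.log(pathMatch?.[1]); // '/DataFile'
console.log(pathMatch?.[3]); // '0123abcd'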
@@ -1,6 +1,6 @@
 import ioClient from 'socket.io-client';
 import * as http from 'http';
-import { Server as IOServer } from 'socket.io';
+import io from 'socket.io';
 import * as sioStream from './sio-stream';
 import async from 'async';
 import assert from 'assert';
@@ -497,7 +497,7 @@ export function RPCServer(params: {
     assert(params.logger);

     const httpServer = http.createServer();
-    const server = new IOServer(httpServer, { maxHttpBufferSize: 1e8 });
+    const server = io(httpServer);
     const log = params.logger;

     /**
@@ -508,7 +508,7 @@ export function RPCServer(params: {
      *
      * @param {BaseService} serviceList - list of services to register
      */
-    (server as any).registerServices = function registerServices(...serviceList: any[]) {
+    server.registerServices = function registerServices(...serviceList: any[]) {
         serviceList.forEach(service => {
             const sock = this.of(service.namespace);
             sock.on('connection', conn => {
@@ -536,7 +536,7 @@ export function RPCServer(params: {
         });
     };

-    (server as any).listen = function listen(port, bindAddress = undefined) {
+    server.listen = function listen(port, bindAddress = undefined) {
         httpServer.listen(port, bindAddress);
     };
@@ -1,209 +0,0 @@
-import { URL } from 'url';
-import { decryptSecret } from '../executables/pensieveCreds/utils';
-import { Logger } from 'werelogs';
-
-export type LocationType =
-    | 'location-mem-v1'
-    | 'location-file-v1'
-    | 'location-azure-v1'
-    | 'location-ceph-radosgw-s3-v1'
-    | 'location-scality-ring-s3-v1'
-    | 'location-aws-s3-v1'
-    | 'location-wasabi-v1'
-    | 'location-do-spaces-v1'
-    | 'location-gcp-v1'
-    | 'location-scality-sproxyd-v1'
-    | 'location-nfs-mount-v1'
-    | 'location-scality-hdclient-v2';
-
-export interface OverlayLocations {
-    [key: string]: {
-        name: string;
-        objectId: string;
-        details?: any;
-        locationType: string;
-        sizeLimitGB?: number;
-        isTransient?: boolean;
-        legacyAwsBehavior?: boolean;
-    };
-}
-
-export type Location = {
-    type:
-        | 'mem'
-        | 'file'
-        | 'azure'
-        | 'aws_s3'
-        | 'gcp'
-        | 'scality'
-        | 'pfs'
-        | 'scality';
-    name: string;
-    objectId: string;
-    details: { [key: string]: any };
-    locationType: string;
-    sizeLimitGB: number | null;
-    isTransient: boolean;
-    legacyAwsBehavior: boolean;
-};
-
-export function patchLocations(
-    overlayLocations: OverlayLocations | undefined | null,
-    creds: any,
-    log: Logger
-) {
-    const locs = overlayLocations ?? {};
-    return Object.entries(locs).reduce<{ [key: string]: Location }>(
-        (acc, [k, l]) => {
-            const location: Location = {
-                type: 'mem',
-                name: k,
-                objectId: l.objectId,
-                details: l.details || {},
-                locationType: l.locationType,
-                sizeLimitGB: l.sizeLimitGB || null,
-                isTransient: Boolean(l.isTransient),
-                legacyAwsBehavior: Boolean(l.legacyAwsBehavior),
-            };
-            let supportsVersioning = false;
-            let pathStyle = process.env.CI_CEPH !== undefined;
-
-            switch (l.locationType) {
-            case 'location-mem-v1':
-                location.type = 'mem';
-                location.details = { supportsVersioning: true };
-                break;
-            case 'location-file-v1':
-                location.type = 'file';
-                location.details = { supportsVersioning: true };
-                break;
-            case 'location-azure-v1':
-                location.type = 'azure';
-                if (l.details.secretKey && l.details.secretKey.length > 0) {
-                    location.details = {
-                        bucketMatch: l.details.bucketMatch,
-                        azureStorageEndpoint: l.details.endpoint,
-                        azureStorageAccountName: l.details.accessKey,
-                        azureStorageAccessKey: decryptSecret(
-                            creds,
-                            l.details.secretKey
-                        ),
-                        azureContainerName: l.details.bucketName,
-                    };
-                }
-                break;
-            case 'location-ceph-radosgw-s3-v1':
-            case 'location-scality-ring-s3-v1':
-                pathStyle = true; // fallthrough
-            case 'location-aws-s3-v1':
-            case 'location-wasabi-v1':
-                supportsVersioning = true; // fallthrough
-            case 'location-do-spaces-v1':
-                location.type = 'aws_s3';
-                if (l.details.secretKey && l.details.secretKey.length > 0) {
-                    let https = true;
-                    let awsEndpoint =
-                        l.details.endpoint || 's3.amazonaws.com';
-                    if (awsEndpoint.includes('://')) {
-                        const url = new URL(awsEndpoint);
-                        awsEndpoint = url.host;
-                        https = url.protocol.includes('https');
-                    }
-
-                    location.details = {
-                        credentials: {
-                            accessKey: l.details.accessKey,
-                            secretKey: decryptSecret(
-                                creds,
-                                l.details.secretKey
-                            ),
-                        },
-                        bucketName: l.details.bucketName,
-                        bucketMatch: l.details.bucketMatch,
-                        serverSideEncryption: Boolean(
-                            l.details.serverSideEncryption
-                        ),
-                        region: l.details.region,
-                        awsEndpoint,
-                        supportsVersioning,
-                        pathStyle,
-                        https,
-                    };
-                }
-                break;
-            case 'location-gcp-v1':
-                location.type = 'gcp';
-                if (l.details.secretKey && l.details.secretKey.length > 0) {
-                    location.details = {
-                        credentials: {
-                            accessKey: l.details.accessKey,
-                            secretKey: decryptSecret(
-                                creds,
-                                l.details.secretKey
-                            ),
-                        },
-                        bucketName: l.details.bucketName,
-                        mpuBucketName: l.details.mpuBucketName,
-                        bucketMatch: l.details.bucketMatch,
-                        gcpEndpoint:
-                            l.details.endpoint || 'storage.googleapis.com',
-                        https: true,
-                    };
-                }
-                break;
-            case 'location-scality-sproxyd-v1':
-                location.type = 'scality';
-                if (
-                    l.details &&
-                    l.details.bootstrapList &&
-                    l.details.proxyPath
-                ) {
-                    location.details = {
-                        supportsVersioning: true,
-                        connector: {
-                            sproxyd: {
-                                chordCos: l.details.chordCos || null,
-                                bootstrap: l.details.bootstrapList,
-                                path: l.details.proxyPath,
-                            },
-                        },
-                    };
-                }
-                break;
-            case 'location-nfs-mount-v1':
-                location.type = 'pfs';
-                if (l.details) {
-                    location.details = {
-                        supportsVersioning: true,
-                        bucketMatch: true,
-                        pfsDaemonEndpoint: {
-                            host: `${l.name}-cosmos-pfsd`,
-                            port: 80,
-                        },
-                    };
-                }
-                break;
-            case 'location-scality-hdclient-v2':
-                location.type = 'scality';
-                if (l.details && l.details.bootstrapList) {
-                    location.details = {
-                        supportsVersioning: true,
-                        connector: {
-                            hdclient: {
-                                bootstrap: l.details.bootstrapList,
-                            },
-                        },
-                    };
-                }
-                break;
-            default:
-                log.info('unknown location type', {
-                    locationType: l.locationType,
-                });
-                return acc;
-            }
-            return { ...acc, [location.name]: location };
-        },
-        {}
-    );
-}
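A standalone sketch of the endpoint normalization performed in the deleted patchLocations() above: a scheme-qualified endpoint is reduced to its host, and https is inferred from the protocol (the sample endpoint is made up for the example):

import { URL } from 'url';

// Mirrors the awsEndpoint handling in the deleted code: strip the scheme
// but remember whether it was https.
let https = true;
let awsEndpoint = 'http://s3.example.com:8000';
if (awsEndpoint.includes('://')) {
    const url = new URL(awsEndpoint);
    awsEndpoint = url.host;                 // 's3.example.com:8000'
    https = url.protocol.includes('https'); // false
}
console.log({ awsEndpoint, https });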
@@ -38,10 +38,6 @@
             "type": "string",
             "pattern": "^arn:aws:iam::[0-9]{12}:saml-provider/[\\w._-]{1,128}$"
         },
-        "principalFederatedOidcIdp": {
-            "type": "string",
-            "pattern": "^(?:http(s)?:\/\/)?[\\w.-]+(?:\\.[\\w\\.-]+)+[\\w\\-\\._~:/?#[\\]@!\\$&'\\(\\)\\*\\+,;=.]+$"
-        },
         "principalAWSItem": {
             "type": "object",
             "properties": {
@@ -102,9 +98,6 @@
                 "oneOf": [
                     {
                         "$ref": "#/definitions/principalFederatedSamlIdp"
-                    },
-                    {
-                        "$ref": "#/definitions/principalFederatedOidcIdp"
                     }
                 ]
             }
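For context, the removed principalFederatedOidcIdp definition accepted URL-shaped identity provider values in a Federated principal. A hypothetical value matching that pattern (the Keycloak realm URL is invented for the example):

// Hypothetical federated principal accepted by the removed OIDC schema
// definition: a URL identifying the OIDC identity provider.
const federatedPrincipal = {
    Federated: 'https://keycloak.example.com/auth/realms/myrealm',
};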
@@ -12,39 +12,13 @@ import {
     actionMapSSO,
     actionMapSTS,
     actionMapMetadata,
-    actionMapScuba,
 } from './utils/actionMaps';

-export const actionNeedQuotaCheck = {
+const _actionNeedQuotaCheck = {
     objectPut: true,
-    objectPutVersion: true,
     objectPutPart: true,
-    objectRestore: true,
 };
-
-/**
- * This variable describes APIs that change the bytes
- * stored, requiring quota updates
- */
-export const actionWithDataDeletion = {
-    objectDelete: true,
-    objectDeleteVersion: true,
-    multipartDelete: true,
-    multiObjectDelete: true,
-};
-
-/**
- * The function returns true if the current API call is a copy object
- * and the action requires a quota evaluation logic, post retrieval
- * of the object metadata.
- * @param {string} action - the action being performed
- * @param {string} currentApi - the current API being called
- * @return {boolean} - whether the action requires a quota check
- */
-export function actionNeedQuotaCheckCopy(action: string, currentApi: string) {
-    return action === 'objectGet' && (currentApi === 'objectCopy' || currentApi === 'objectPutCopyPart');
-}

 function _findAction(service: string, method: string) {
     switch (service) {
         case 's3':
@@ -62,8 +36,6 @@ function _findAction(service: string, method: string) {
             return actionMapSTS[method];
         case 'metadata':
             return actionMapMetadata[method];
-        case 'scuba':
-            return actionMapScuba[method];
         default:
             return undefined;
     }
@@ -133,10 +105,6 @@ function _buildArn(
             return `arn:scality:metadata::${requesterInfo!.accountid}:` +
                 `${generalResource}/`;
         }
-        case 'scuba': {
-            return `arn:scality:scuba::${requesterInfo!.accountid}:` +
-                `${generalResource}${specificResource ? '/' + specificResource : ''}`;
-        }
         default:
             return undefined;
     }
@@ -151,9 +119,7 @@ export type RequesterInfo = {
     principalType: string;
     principaltype: string;
     userid: string;
-    username: string;
-    keycloakGroup: string;
-    keycloakRole: string;
+    username: string,
 }

 /**
@@ -205,7 +171,6 @@ export default class RequestContext {
     _needTagEval: boolean;
     _foundAction?: string;
     _foundResource?: string;
-    _objectLockRetentionDays?: number | null;

     constructor(
         headers: { [key: string]: string | string[] },
@@ -227,7 +192,6 @@ export default class RequestContext {
         requestObjTags?: string,
         existingObjTag?: string,
         needTagEval?: false,
-        objectLockRetentionDays?: number,
     ) {
         this._headers = headers;
         this._query = query;
@@ -256,12 +220,10 @@ export default class RequestContext {
         this._securityToken = securityToken;
         this._policyArn = policyArn;
        this._action = action;
-        this._needQuota = actionNeedQuotaCheck[apiMethod] === true
-            || actionWithDataDeletion[apiMethod] === true;
+        this._needQuota = _actionNeedQuotaCheck[apiMethod] === true;
         this._requestObjTags = requestObjTags || null;
         this._existingObjTag = existingObjTag || null;
         this._needTagEval = needTagEval || false;
-        this._objectLockRetentionDays = objectLockRetentionDays || null;
         return this;
     }

@@ -293,7 +255,6 @@ export default class RequestContext {
             requestObjTags: this._requestObjTags,
             existingObjTag: this._existingObjTag,
             needTagEval: this._needTagEval,
-            objectLockRetentionDays: this._objectLockRetentionDays,
         };
         return JSON.stringify(requestInfo);
     }
@@ -334,7 +295,6 @@ export default class RequestContext {
             obj.requestObjTags,
             obj.existingObjTag,
             obj.needTagEval,
-            obj.objectLockRetentionDays,
         );
     }

@@ -738,24 +698,4 @@ export default class RequestContext {
     getNeedTagEval() {
         return this._needTagEval;
     }
-
-    /**
-     * Get object lock retention days
-     *
-     * @returns objectLockRetentionDays - object lock retention days
-     */
-    getObjectLockRetentionDays() {
-        return this._objectLockRetentionDays;
-    }
-
-    /**
-     * Set object lock retention days
-     *
-     * @param objectLockRetentionDays - object lock retention days
-     * @returns itself
-     */
-    setObjectLockRetentionDays(objectLockRetentionDays: number) {
-        this._objectLockRetentionDays = objectLockRetentionDays;
-        return this;
-    }
 }
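A standalone sketch of the quota gating removed in this file (development side of the _needQuota hunk above): an API needs a quota evaluation either because it writes bytes or because it deletes data. The maps are copied from the diff; the helper function is illustrative:

// Quota gating as on the development side: write APIs and data-deleting
// APIs both require a quota evaluation.
const actionNeedQuotaCheck: Record<string, boolean> = {
    objectPut: true,
    objectPutVersion: true,
    objectPutPart: true,
    objectRestore: true,
};
const actionWithDataDeletion: Record<string, boolean> = {
    objectDelete: true,
    objectDeleteVersion: true,
    multipartDelete: true,
    multiObjectDelete: true,
};

const needQuota = (apiMethod: string): boolean =>
    actionNeedQuotaCheck[apiMethod] === true ||
    actionWithDataDeletion[apiMethod] === true;

console.log(needQuota('objectDelete')); // true
console.log(needQuota('objectGet'));    // false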
@@ -310,7 +310,6 @@ export function evaluatePolicy(
 }
 
 /**
- * @deprecated Upgrade to standardEvaluateAllPolicies
  * Evaluate whether a request is permitted under a policy.
  * @param requestContext - Info necessary to
  * evaluate permission
@@ -326,16 +325,6 @@ export function evaluateAllPolicies(
     allPolicies: any[],
     log: Logger,
 ): string {
-    return standardEvaluateAllPolicies(requestContext, allPolicies, log).verdict;
-}
-export function standardEvaluateAllPolicies(
-    requestContext: RequestContext,
-    allPolicies: any[],
-    log: Logger,
-): {
-    verdict: string;
-    isImplicit: boolean;
-} {
     log.trace('evaluating all policies');
     let allow = false;
     let allowWithTagCondition = false;
@@ -344,10 +333,7 @@ export function standardEvaluateAllPolicies(
         const singlePolicyVerdict = evaluatePolicy(requestContext, allPolicies[i], log);
         // If there is any Deny, just return Deny
         if (singlePolicyVerdict === 'Deny') {
-            return {
-                verdict: 'Deny',
-                isImplicit: false,
-            };
+            return 'Deny';
         }
         if (singlePolicyVerdict === 'Allow') {
             allow = true;
@@ -358,7 +344,6 @@ export function standardEvaluateAllPolicies(
         } // else 'Neutral'
     }
     let verdict;
-    let isImplicit = false;
     if (allow) {
         if (denyWithTagCondition) {
             verdict = 'NeedTagConditionEval';
@@ -370,9 +355,8 @@ export function standardEvaluateAllPolicies(
             verdict = 'NeedTagConditionEval';
         } else {
             verdict = 'Deny';
-            isImplicit = true;
         }
     }
-    log.trace('result of evaluating all policies', { verdict, isImplicit });
-    return { verdict, isImplicit };
+    log.trace('result of evaluating all policies', { verdict });
+    return verdict;
 }

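Note: on the development side (the `-` lines) evaluation is split into a deprecated string-returning wrapper and standardEvaluateAllPolicies, which also reports whether a Deny was implicit (no statement matched) rather than explicit. A minimal call-site sketch, assuming a populated RequestContext, an array of policy documents, and a werelogs Logger:

    // hotfix/7.7 API: a bare string verdict
    const verdict = evaluateAllPolicies(requestContext, allPolicies, log);

    // development API: verdict plus the implicit/explicit distinction
    const res = standardEvaluateAllPolicies(requestContext, allPolicies, log);
    if (res.verdict === 'Deny' && res.isImplicit) {
        // no policy matched at all; a caller may fall back to other
        // authorization sources instead of treating this as a hard Deny
    }
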
@@ -33,7 +33,6 @@ const sharedActionMap = {
     bypassGovernanceRetention: 's3:BypassGovernanceRetention',
     listMultipartUploads: 's3:ListBucketMultipartUploads',
     listParts: 's3:ListMultipartUploadParts',
-    metadataSearch: 's3:MetadataSearch',
     multipartDelete: 's3:AbortMultipartUpload',
     objectDelete: 's3:DeleteObject',
     objectDeleteTagging: 's3:DeleteObjectTagging',
@@ -48,14 +47,6 @@ const sharedActionMap = {
     objectPutLegalHold: 's3:PutObjectLegalHold',
     objectPutRetention: 's3:PutObjectRetention',
     objectPutTagging: 's3:PutObjectTagging',
-    objectRestore: 's3:RestoreObject',
-    objectPutVersion: 's3:PutObjectVersion',
-};
-
-const actionMapBucketQuotas = {
-    bucketGetQuota: 'scality:GetBucketQuota',
-    bucketUpdateQuota: 'scality:UpdateBucketQuota',
-    bucketDeleteQuota: 'scality:DeleteBucketQuota',
 };
 
 // action map used for request context
@@ -71,7 +62,6 @@ const actionMapRQ = {
     initiateMultipartUpload: 's3:PutObject',
     objectDeleteVersion: 's3:DeleteObjectVersion',
     objectDeleteTaggingVersion: 's3:DeleteObjectVersionTagging',
-    objectGetArchiveInfo: 'scality:GetObjectArchiveInfo',
     objectGetVersion: 's3:GetObjectVersion',
     objectGetACLVersion: 's3:GetObjectVersionAcl',
     objectGetTaggingVersion: 's3:GetObjectVersionTagging',
@@ -80,17 +70,14 @@ const actionMapRQ = {
     objectPutTaggingVersion: 's3:PutObjectVersionTagging',
     serviceGet: 's3:ListAllMyBuckets',
     objectReplicate: 's3:ReplicateObject',
-    objectGetRetentionVersion: 's3:GetObjectRetention',
-    objectPutRetentionVersion: 's3:PutObjectRetention',
-    objectGetLegalHoldVersion: 's3:GetObjectLegalHold',
-    objectPutLegalHoldVersion: 's3:PutObjectLegalHold',
+    objectPutRetentionVersion: 's3:PutObjectVersionRetention',
+    objectPutLegalHoldVersion: 's3:PutObjectVersionLegalHold',
     listObjectVersions: 's3:ListBucketVersions',
     ...sharedActionMap,
-    ...actionMapBucketQuotas,
 };
 
 // action map used for bucket policies
-const actionMapBP = actionMapRQ;
+const actionMapBP = { ...sharedActionMap };
 
 // action map for all relevant s3 actions
 const actionMapS3 = {
@@ -138,7 +125,6 @@ const actionMonitoringMapS3 = {
     initiateMultipartUpload: 'CreateMultipartUpload',
     listMultipartUploads: 'ListMultipartUploads',
     listParts: 'ListParts',
-    metadataSearch: 'MetadataSearch',
     multiObjectDelete: 'DeleteObjects',
     multipartDelete: 'AbortMultipartUpload',
     objectCopy: 'CopyObject',
@@ -157,17 +143,7 @@ const actionMonitoringMapS3 = {
     objectPutPart: 'UploadPart',
     objectPutRetention: 'PutObjectRetention',
     objectPutTagging: 'PutObjectTagging',
-    objectRestore: 'RestoreObject',
     serviceGet: 'ListBuckets',
-    bucketGetQuota: 'GetBucketQuota',
-    bucketUpdateQuota: 'UpdateBucketQuota',
-    bucketDeleteQuota: 'DeleteBucketQuota',
-};
-
-const actionMapAccountQuotas = {
-    UpdateAccountQuota : 'scality:UpdateAccountQuota',
-    DeleteAccountQuota : 'scality:DeleteAccountQuota',
-    GetAccountQuota : 'scality:GetAccountQuota',
 };
 
 const actionMapIAM = {
@@ -192,7 +168,6 @@ const actionMapIAM = {
     getPolicyVersion: 'iam:GetPolicyVersion',
     getUser: 'iam:GetUser',
     listAccessKeys: 'iam:ListAccessKeys',
-    listEntitiesForPolicy: 'iam:ListEntitiesForPolicy',
     listGroupPolicies: 'iam:ListGroupPolicies',
     listGroups: 'iam:ListGroups',
     listGroupsForUser: 'iam:ListGroupsForUser',
@@ -211,7 +186,6 @@ const actionMapIAM = {
     tagUser: 'iam:TagUser',
     unTagUser: 'iam:UntagUser',
     listUserTags: 'iam:ListUserTags',
-    ...actionMapAccountQuotas,
 };
 
 const actionMapSSO = {
@@ -227,14 +201,6 @@ const actionMapMetadata = {
     default: 'metadata:bucketd',
 };
 
-const actionMapScuba = {
-    GetMetrics: 'scuba:GetMetrics',
-    AdminStartIngest: 'scuba:AdminStartIngest',
-    AdminStopIngest: 'scuba:AdminStopIngest',
-    AdminReadRaftCseq: 'scuba:AdminReadRaftCseq',
-    AdminTriggerRepair: 'scuba:AdminTriggerRepair',
-};
-
 export {
     actionMapRQ,
     actionMapBP,
@@ -244,5 +210,4 @@ export {
     actionMapSSO,
     actionMapSTS,
     actionMapMetadata,
-    actionMapScuba,
 };

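Note: these maps translate Arsenal's internal API method names into IAM-style action strings before policy evaluation, and the hotfix side also narrows actionMapBP (bucket policies) from the full request-context map down to sharedActionMap. A small illustrative lookup (the surrounding code is a sketch, not a library API):

    // resolving the action for an incoming objectDelete request
    const action = actionMapRQ['objectDelete'];        // -> 's3:DeleteObject'
    // on hotfix/7.7 bucket policies only know the shared actions
    const bpAction = actionMapBP['objectPutTagging'];  // -> 's3:PutObjectTagging'
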
@@ -1,5 +1,4 @@
 import { handleWildcardInResource } from './wildcards';
-import { policyArnAllowedEmptyAccountId } from '../../constants';
 /**
  * Checks whether an ARN from a request matches an ARN in a policy
  * to compare against each portion of the ARN from the request
@@ -38,10 +38,9 @@ export default function checkArnMatch(
         const requestSegment = caseSensitive ? requestArnArr[j] :
             requestArnArr[j].toLowerCase();
         const policyArnArr = policyArn.split(':');
-        // We want to allow an empty account ID for utapi and scuba service ARNs to not
+        // We want to allow an empty account ID for utapi service ARNs to not
         // break compatibility.
-        if (j === 4 && policyArnAllowedEmptyAccountId.includes(policyArnArr[2])
-            && policyArnArr[4] === '') {
+        if (j === 4 && policyArnArr[2] === 'utapi' && policyArnArr[4] === '') {
             continue;
         } else if (!segmentRegEx.test(requestSegment)) {
             return false;

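Note: segment 4 of a colon-split ARN is the account ID; both sides skip the comparison when a policy ARN leaves it empty, the hotfix only for utapi, development for any service listed in policyArnAllowedEmptyAccountId. A worked example of the split (ARN value illustrative):

    const policyArnArr = 'arn:scality:utapi:::buckets/*'.split(':');
    // -> ['arn', 'scality', 'utapi', '', '', 'buckets/*']
    // policyArnArr[2] === 'utapi' and policyArnArr[4] === '',
    // so the account-ID segment (j === 4) is not required to match
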
@@ -142,8 +142,6 @@ export function findConditionKey(
         // header
         case 's3:ObjLocationConstraint': return headers['x-amz-meta-scal-location-constraint'];
         case 'sts:ExternalId': return requestContext.getRequesterExternalId();
-        case 'keycloak:groups': return requesterInfo.keycloakGroup;
-        case 'keycloak:roles': return requesterInfo.keycloakRole;
         case 'iam:PolicyArn': return requestContext.getPolicyArn();
         // s3:ExistingObjectTag - Used to check that existing object tag has
         // specific tag key and value. Extraction of correct tag key is done in CloudServer.
@@ -168,9 +166,6 @@ export function findConditionKey(
             return requestContext.getNeedTagEval() && requestContext.getRequestObjTags()
                 ? getTagKeys(requestContext.getRequestObjTags()!)
                 : undefined;
-        // The maximum retention period is 100 years.
-        case 's3:object-lock-remaining-retention-days':
-            return requestContext.getObjectLockRetentionDays() || undefined;
         default:
             return undefined;
     }

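Note: the removed case feeds RequestContext.getObjectLockRetentionDays() (dropped in the first hunk of this compare) into the s3:object-lock-remaining-retention-days condition key. A sketch of a policy statement that would rely on it, written as a TypeScript literal with illustrative values:

    const statement = {
        Effect: 'Deny',
        Action: 's3:PutObjectRetention',
        Resource: 'arn:aws:s3:::locked-bucket/*',
        Condition: {
            NumericGreaterThan: { 's3:object-lock-remaining-retention-days': '30' },
        },
    };
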
@@ -30,7 +30,7 @@ export default class ResultsCollector extends EventEmitter {
      * @emits ResultCollector#done
      * @emits ResultCollector#error
      */
-    pushResult(err: Error | null | undefined, subPartIndex: number) {
+    pushResult(err: Error | undefined, subPartIndex: number) {
         this._results.push({
             error: err,
             subPartIndex,

@@ -1,15 +1,11 @@
 import assert from 'assert';
 import * as crypto from 'crypto';
 import * as stream from 'stream';
-import azure from '@azure/storage-blob';
-
-import { RequestLogger } from 'werelogs';
-
 import ResultsCollector from './ResultsCollector';
 import SubStreamInterface from './SubStreamInterface';
 import * as objectUtils from '../objectUtils';
 import MD5Sum from '../MD5Sum';
-import errors, { ArsenalError } from '../../errors';
+import errors from '../../errors';
 
 export const splitter = '|';
 export const overviewMpuKey = 'azure_mpu';
@@ -65,7 +61,7 @@ export const getBlockId = (
     const paddedSubPart = padString(subPartIndex, 'subPart');
     const blockId = `${uploadId}${splitter}partNumber${paddedPartNumber}` +
         `${splitter}subPart${paddedSubPart}${splitter}`;
-    return Buffer.from(padString(blockId, 'part')).toString('base64');
+    return padString(blockId, 'part');
 };
 
 export const getSummaryPartId = (partNumber: number, eTag: string, size: number) => {
@@ -104,17 +100,10 @@ export const getSubPartIds = (
 ) => [...Array(part.numberSubParts).keys()].map(subPartIndex =>
     getBlockId(uploadId, part.partNumber, subPartIndex));
 
-type ErrorWrapperFn = (
-    s3Method: string,
-    azureMethod: string,
-    command: (client: azure.ContainerClient) => Promise<any>,
-    log: RequestLogger,
-    cb: (err: ArsenalError | null | undefined) => void,
-) => void
-
+// TODO Better type this
 export const putSinglePart = (
-    errorWrapperFn: ErrorWrapperFn,
-    request: stream.Readable,
+    errorWrapperFn: (first: string, second: string, third: any, log: any, cb: any) => void,
+    request: any,
     params: {
         bucketName: string;
         partNumber: number;
@@ -125,44 +114,44 @@ export const putSinglePart = (
     },
     dataStoreName: string,
     log: RequestLogger,
-    cb: (err: ArsenalError | null | undefined, dataStoreETag?: string, size?: number) => void,
+    cb: any,
 ) => {
     const { bucketName, partNumber, size, objectKey, contentMD5, uploadId }
         = params;
     const blockId = getBlockId(uploadId, partNumber, 0);
     const passThrough = new stream.PassThrough();
     const options = contentMD5
-        ? { transactionalContentMD5: objectUtils.getMD5Buffer(contentMD5) }
+        ? { useTransactionalMD5: true, transactionalContentMD5: contentMD5 }
         : {};
     request.pipe(passThrough);
-    return errorWrapperFn('uploadPart', 'createBlockFromStream', async client => {
-        try {
-            const result = await client.getBlockBlobClient(objectKey)
-                .stageBlock(blockId, () => passThrough, size, options);
-            const md5 = result.contentMD5 || '';
-            const eTag = objectUtils.getHexMD5(md5);
-            return eTag
-        } catch (err: any) {
+    return errorWrapperFn('uploadPart', 'createBlockFromStream',
+        [blockId, bucketName, objectKey, passThrough, size, options,
+        (err: any | null, result: any) => {
+            if (err) {
                 log.error('Error from Azure data backend uploadPart',
                     { error: err.message, dataStoreName });
                 if (err.code === 'ContainerNotFound') {
-                    throw errors.NoSuchBucket;
+                    return cb(errors.NoSuchBucket);
                 }
                 if (err.code === 'InvalidMd5') {
-                    throw errors.InvalidDigest;
+                    return cb(errors.InvalidDigest);
                 }
                 if (err.code === 'Md5Mismatch') {
-                    throw errors.BadDigest;
+                    return cb(errors.BadDigest);
                 }
-                throw errors.InternalError.customizeDescription(
-                    `Error returned from Azure: ${err.message}`
+                return cb(errors.InternalError.customizeDescription(
+                    `Error returned from Azure: ${err.message}`),
                 );
             }
-    }, log, cb);
+            const md5 = result.headers['content-md5'] || '';
+            const eTag = objectUtils.getHexMD5(md5);
+            return cb(null, eTag, size);
+        }], log, cb);
 };
 
-const putNextSubPart = (
-    errorWrapperFn: ErrorWrapperFn,
+// TODO type this
+export const putNextSubPart = (
+    errorWrapperFn: any,
     partParams: {
         uploadId: string;
         partNumber: number;
@@ -170,10 +159,11 @@ const putNextSubPart = (
         objectKey: string;
     },
     subPartInfo: { lastPartIndex: number; lastPartSize: number },
-    subPartStream: stream.Readable,
+    subPartStream: any,
     subPartIndex: number,
     resultsCollector: ResultsCollector,
     log: RequestLogger,
+    cb: any,
 ) => {
     const { uploadId, partNumber, bucketName, objectKey } = partParams;
     const subPartSize = getSubPartSize(
@@ -181,20 +171,14 @@ const putNextSubPart = (
     const subPartId = getBlockId(uploadId, partNumber,
         subPartIndex);
     resultsCollector.pushOp();
-    errorWrapperFn('uploadPart', 'createBlockFromStream', async client => {
-        try {
-            const result = await client.getBlockBlobClient(objectKey)
-                .stageBlock(subPartId, () => subPartStream, subPartSize, {});
-            resultsCollector.pushResult(null, subPartIndex);
-        } catch (err: any) {
-            resultsCollector.pushResult(err, subPartIndex);
-        }
-    }, log, () => {});
+    errorWrapperFn('uploadPart', 'createBlockFromStream',
+        [subPartId, bucketName, objectKey, subPartStream, subPartSize,
+            {}, err => resultsCollector.pushResult(err, subPartIndex)], log, cb);
 };
 
 export const putSubParts = (
-    errorWrapperFn: ErrorWrapperFn,
-    request: stream.Readable,
+    errorWrapperFn: any,
+    request: any,
     params: {
         uploadId: string;
         partNumber: number;
@@ -204,7 +188,7 @@ export const putSubParts = (
     },
     dataStoreName: string,
     log: RequestLogger,
-    cb: (err: ArsenalError | null | undefined, dataStoreETag?: string) => void,
+    cb: any,
 ) => {
     const subPartInfo = getSubPartInfo(params.size);
     const resultsCollector = new ResultsCollector();
@@ -243,13 +227,14 @@ export const putSubParts = (
         const totalLength = streamInterface.getTotalBytesStreamed();
         log.trace('successfully put subparts to Azure',
             { numberSubParts, totalLength });
-        hashedStream.on('hashed', () => cb(null, hashedStream.completedHash));
+        hashedStream.on('hashed', () => cb(null, hashedStream.completedHash,
+            totalLength));
 
         // in case the hashed event was already emitted before the
         // event handler was registered:
         if (hashedStream.completedHash) {
             hashedStream.removeAllListeners('hashed');
-            return cb(null, hashedStream.completedHash);
+            return cb(null, hashedStream.completedHash, totalLength);
         }
         return undefined;
     });
@@ -257,7 +242,7 @@ export const putSubParts = (
     const currentStream = streamInterface.getCurrentStream();
     // start first put to Azure before we start streaming the data
     putNextSubPart(errorWrapperFn, params, subPartInfo,
-        currentStream, 0, resultsCollector, log);
+        currentStream, 0, resultsCollector, log, cb);
 
     request.pipe(hashedStream);
     hashedStream.on('end', () => {
@@ -277,8 +262,8 @@ export const putSubParts = (
             }
             const { nextStream, subPartIndex } =
                 streamInterface.transitionToNextStream();
-            putNextSubPart(errorWrapperFn, params, subPartInfo, nextStream,
-                subPartIndex, resultsCollector, log);
+            putNextSubPart(errorWrapperFn, params, subPartInfo,
+                nextStream, subPartIndex, resultsCollector, log, cb);
             streamInterface.write(firstChunk);
         } else {
             streamInterface.write(data);

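Note: development (the `-` side) drives Azure through the promise-based @azure/storage-blob API, while the hotfix keeps the older callback signature of createBlockFromStream; in both cases errorWrapperFn bridges the Azure call back to the S3-style callback. A condensed sketch of the promise flow, taken from the hunk above and assuming errorWrapperFn invokes the async command with a ContainerClient:

    errorWrapperFn('uploadPart', 'createBlockFromStream', async client => {
        const res = await client.getBlockBlobClient(objectKey)
            .stageBlock(blockId, () => passThrough, size, options);
        // Azure returns the block MD5 as bytes; convert it for the S3 ETag
        return objectUtils.getHexMD5(res.contentMD5 || '');
    }, log, cb);
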
@@ -1,25 +1,19 @@
-import { scaleMsPerDay } from '../objectUtils';
-const msInOneDay = 24 * 60 * 60 * 1000; // Milliseconds in a day.
+const oneDay = 24 * 60 * 60 * 1000; // Milliseconds in a day.
 
 export default class LifecycleDateTime {
     _transitionOneDayEarlier?: boolean;
     _expireOneDayEarlier?: boolean;
-    _timeProgressionFactor?: number;
-    _scaledMsPerDay: number;
 
     constructor(params?: {
         transitionOneDayEarlier: boolean;
         expireOneDayEarlier: boolean;
-        timeProgressionFactor: number;
     }) {
         this._transitionOneDayEarlier = params?.transitionOneDayEarlier;
         this._expireOneDayEarlier = params?.expireOneDayEarlier;
-        this._timeProgressionFactor = params?.timeProgressionFactor || 1;
-        this._scaledMsPerDay = scaleMsPerDay(this._timeProgressionFactor);
     }
 
     getCurrentDate() {
-        const timeTravel = this._expireOneDayEarlier ? msInOneDay : 0;
+        const timeTravel = this._expireOneDayEarlier ? oneDay : 0;
         return Date.now() + timeTravel;
     }
@@ -31,7 +25,7 @@ export default class LifecycleDateTime {
     findDaysSince(date: Date) {
         const now = this.getCurrentDate();
         const diff = now - date.getTime();
-        return Math.floor(diff / this._scaledMsPerDay);
+        return Math.floor(diff / (1000 * 60 * 60 * 24));
     }
 
     /**
@@ -58,25 +52,8 @@ export default class LifecycleDateTime {
         }
         if (transition.Days !== undefined) {
             const lastModifiedTime = this.getTimestamp(lastModified);
-            const timeTravel = this._transitionOneDayEarlier ? -msInOneDay : 0;
-            return lastModifiedTime + (transition.Days * this._scaledMsPerDay) + timeTravel;
-        }
-    }
-
-    /**
-     * Find the Unix time at which the non-current version transition should occur.
-     * @param transition - A non-current version transition from the lifecycle non-current version transitions
-     * @param lastModified - The object's last modified date
-     * @return - The normalized transition timestamp
-     */
-    getNCVTransitionTimestamp(
-        transition: { NoncurrentDays?: number },
-        lastModified: string,
-    ) {
-        if (transition.NoncurrentDays !== undefined) {
-            const lastModifiedTime = this.getTimestamp(lastModified);
-            const timeTravel = this._transitionOneDayEarlier ? -msInOneDay : 0;
-            return lastModifiedTime + (transition.NoncurrentDays * this._scaledMsPerDay) + timeTravel;
+            const timeTravel = this._transitionOneDayEarlier ? -oneDay : 0;
+            return lastModifiedTime + (transition.Days * oneDay) + timeTravel;
         }
     }
 }

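Note: with the development-side timeProgressionFactor, one simulated lifecycle "day" can elapse faster than wall-clock time, so findDaysSince divides by the scaled day length instead of the 86,400,000 ms constant. A worked example, using scaleMsPerDay from the objectUtils hunk further down this compare:

    // scaleMsPerDay(1)  -> 86400000 (real time)
    // scaleMsPerDay(24) -> 3600000  (a lifecycle "day" passes every hour)
    // scaleMsPerDay(0)  -> 86400000 (the factor falls back to 1)
    const dt = new LifecycleDateTime({
        transitionOneDayEarlier: false,
        expireOneDayEarlier: false,
        timeProgressionFactor: 24,
    });
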
@@ -61,47 +61,6 @@ export default class LifecycleUtils {
         return trans1 > trans2 ? transition1 : transition2;
     }
 
-    /**
-     * Compare two non-current version transition rules and return the one that is most recent.
-     * @param params - The function parameters
-     * @param params.transition1 - A non-current version transition from the current rule
-     * @param params.transition2 - A non-current version transition from the previous rule
-     * @param params.lastModified - The object's last modified
-     * date
-     * @return The most applicable transition rule
-     */
-    compareNCVTransitions(params: {
-        lastModified: string;
-        transition1: any;
-        transition2?: any;
-    }): number | undefined;
-    compareNCVTransitions(params: {
-        lastModified: string;
-        transition1?: any;
-        transition2: any;
-    }): number | undefined;
-    compareNCVTransitions(params: {
-        lastModified: string;
-        transition1: any;
-        transition2: any;
-    }): number | undefined;
-    compareNCVTransitions(params: {
-        lastModified: string;
-        transition1?: any;
-        transition2?: any;
-    }) {
-        const { transition1, transition2, lastModified } = params;
-        if (transition1 === undefined) {
-            return transition2;
-        }
-        if (transition2 === undefined) {
-            return transition1;
-        }
-        const trans1 = this._datetime.getNCVTransitionTimestamp(transition1!, lastModified)!;
-        const trans2 = this._datetime.getNCVTransitionTimestamp(transition2!, lastModified)!;
-        return trans1 > trans2 ? transition1 : transition2;
-    }
-
     // TODO Fix This
     /**
      * Find the most relevant trantition rule for the given transitions array
@@ -139,42 +98,6 @@ export default class LifecycleUtils {
         });
     }
 
-    /**
-     * Find the most relevant non-current version transition rule for the given transitions array
-     * and any previously stored non-current version transition from another rule.
-     * @param params - The function parameters
-     * @param params.transitions - Array of lifecycle non-current version transitions
-     * @param params.lastModified - The object's last modified
-     * date
-     * @return The most applicable non-current version transition rule
-     */
-    getApplicableNCVTransition(params: {
-        store: any;
-        currentDate: Date;
-        transitions: any[];
-        lastModified: string;
-    }) {
-        const { transitions, store, lastModified, currentDate } = params;
-        const transition = transitions.reduce((result, transition) => {
-            const isApplicable = // Is the transition time in the past?
-                this._datetime.getTimestamp(currentDate) >=
-                this._datetime.getNCVTransitionTimestamp(transition, lastModified)!;
-            if (!isApplicable) {
-                return result;
-            }
-            return this.compareNCVTransitions({
-                transition1: transition,
-                transition2: result,
-                lastModified,
-            });
-        }, undefined);
-        return this.compareNCVTransitions({
-            transition1: transition,
-            transition2: store.NoncurrentVersionTransition,
-            lastModified,
-        });
-    }
-
     // TODO
     /**
      * Filter out all rules based on `Status` and `Filter` (Prefix and Tags)
@@ -318,17 +241,7 @@ export default class LifecycleUtils {
                     currentDate,
                 });
             }
-
-            const ncvt = 'NoncurrentVersionTransitions';
-            const hasNoncurrentVersionTransitions = Array.isArray(rule[ncvt]) && rule[ncvt].length > 0;
-            if (hasNoncurrentVersionTransitions && this._supportedRules.includes('noncurrentVersionTransition')) {
-                store.NoncurrentVersionTransition = this.getApplicableNCVTransition({
-                    transitions: rule.NoncurrentVersionTransitions,
-                    lastModified: metadata.LastModified,
-                    store,
-                    currentDate,
-                });
-            }
+            // TODO: Add support for NoncurrentVersionTransitions.
             return store;
         }, {});
         // Do not transition to a location where the object is already stored.
@@ -336,12 +249,6 @@ export default class LifecycleUtils {
             && applicableRules.Transition.StorageClass === metadata.StorageClass) {
             applicableRules.Transition = undefined;
         }
-
-        if (applicableRules.NoncurrentVersionTransition
-            && applicableRules.NoncurrentVersionTransition.StorageClass === metadata.StorageClass) {
-            applicableRules.NoncurrentVersionTransition = undefined;
-        }
-
         return applicableRules;
         /* eslint-enable no-param-reassign */
     }

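Note: the removed helpers mirror the current-version logic kept above, but keyed on NoncurrentDays. Given two candidate rules, compareNCVTransitions keeps the one whose getNCVTransitionTimestamp lands later. A sketch with illustrative values:

    const winner = utils.compareNCVTransitions({
        transition1: { NoncurrentDays: 30, StorageClass: 'cold' },
        transition2: { NoncurrentDays: 7, StorageClass: 'warm' },
        lastModified: '2024-01-01T00:00:00.000Z',
    }); // -> the 30-day rule: lastModified + 30 days > lastModified + 7 days
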
@@ -1,110 +0,0 @@
-import {parseStringPromise} from 'xml2js';
-import errors, {ArsenalError} from '../errors';
-import * as werelogs from 'werelogs';
-import {validRestoreObjectTiers} from "../constants";
-
-/*
-    Format of xml request:
-    <RestoreRequest>
-        <Days>integer</Days>
-        <Tier>Standard|Bulk|Expedited</Tier>
-    </RestoreRequest>
-*/
-
-/**
- * validate restore request xml
- * @param restoreRequest - parsed restore request object
- * @return{ArsenalError|undefined} - error on failure, undefined on success
- */
-function validateRestoreRequest(restoreRequest?: any) {
-    if (!restoreRequest) {
-        const desc = 'request xml does not contain RestoreRequest';
-        return errors.MalformedXML.customizeDescription(desc);
-    }
-    if (!restoreRequest.Days || !restoreRequest.Days[0]) {
-        const desc = 'request xml does not contain RestoreRequest.Days';
-        return errors.MalformedXML.customizeDescription(desc);
-    }
-    // RestoreRequest.Days must be greater than or equal to 1
-    const daysValue = Number.parseInt(restoreRequest.Days[0], 10);
-    if (Number.isNaN(daysValue)) {
-        const desc = `RestoreRequest.Days is invalid type. [${restoreRequest.Days[0]}]`;
-        return errors.MalformedXML.customizeDescription(desc);
-    }
-    if (daysValue < 1) {
-        const desc = `RestoreRequest.Days must be greater than 0. [${restoreRequest.Days[0]}]`;
-        return errors.MalformedXML.customizeDescription(desc);
-    }
-    if (daysValue > 2147483647) {
-        const desc = `RestoreRequest.Days must be less than 2147483648. [${restoreRequest.Days[0]}]`;
-        return errors.MalformedXML.customizeDescription(desc);
-    }
-    if (restoreRequest.Tier && restoreRequest.Tier[0] && !validRestoreObjectTiers.has(restoreRequest.Tier[0])) {
-        const desc = `RestoreRequest.Tier is invalid value. [${restoreRequest.Tier[0]}]`;
-        return errors.MalformedXML.customizeDescription(desc);
-    }
-    return undefined;
-}
-
-/**
- * parseRestoreRequestXml - Parse and validate xml body, returning callback with
- * object restoreReqObj: { days: <value>, tier: <value> }
- * @param xml - xml body to parse and validate
- * @param log - Werelogs logger
- * @param cb - callback to server
- * @return - calls callback with object restore request or error
- */
-export async function parseRestoreRequestXml(
-    xml: string,
-    log: werelogs.Logger,
-    cb: (err: ArsenalError | null, data?: any) => void,
-) {
-    let result;
-    try {
-        result = await parseStringPromise(xml);
-    } catch (err) {
-        log.debug('xml parsing failed', {
-            error: err,
-            method: 'parseRestoreXml',
-            xml,
-        });
-        return cb(errors.MalformedXML);
-    }
-    if (!result) {
-        const desc = 'request xml is undefined or empty';
-        return cb(errors.MalformedXML.customizeDescription(desc));
-    }
-    const restoreRequest = result.RestoreRequest;
-    const restoreReqError = validateRestoreRequest(restoreRequest);
-    if (restoreReqError) {
-        log.debug('restore request validation failed', {
-            error: restoreReqError,
-            method: 'validateRestoreRequest',
-            xml,
-        });
-        return cb(restoreReqError);
-    }
-    // If do not specify Tier, set "Standard"
-    return cb(null, {
-        days: Number.parseInt(restoreRequest.Days, 10),
-        tier: restoreRequest.Tier && restoreRequest.Tier[0] ? restoreRequest.Tier[0] : 'Standard',
-    });
-}
-
-/**
- * convertToXml - Convert restore request info object to xml
- * @param days - restore days
- * @param tier - restore tier
- * @return - returns restore request information xml string
- */
-export function convertToXml(days: string, tier: string) {
-    if (!(days && tier)) {
-        return '';
-    }
-    return [
-        '<RestoreRequest xmlns="http://s3.amazonaws.com/doc/2006-03-01/">',
-        `<Days>${days}</Days>`,
-        `<Tier>${tier}</Tier>`,
-        '</RestoreRequest>',
-    ].join('');
-}

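Note: this whole module exists only on the development side; the file is absent from hotfix/7.7. A usage sketch of the parser (log is a werelogs Logger):

    const xml = '<RestoreRequest xmlns="http://s3.amazonaws.com/doc/2006-03-01/">'
        + '<Days>5</Days><Tier>Standard</Tier></RestoreRequest>';
    parseRestoreRequestXml(xml, log, (err, restoreReq) => {
        // on success err is null and restoreReq is { days: 5, tier: 'Standard' };
        // Days outside [1, 2147483647] or an unknown Tier yields MalformedXML
    });
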
@@ -1,21 +1,5 @@
-const msInOneDay = 24 * 60 * 60 * 1000; // Milliseconds in a day.
-
-export const getMD5Buffer = (base64MD5: WithImplicitCoercion<string> | Uint8Array) =>
-    base64MD5 instanceof Uint8Array ? base64MD5 : Buffer.from(base64MD5, 'base64')
-
-export const getHexMD5 = (base64MD5: WithImplicitCoercion<string> | Uint8Array) =>
-    getMD5Buffer(base64MD5).toString('hex');
+export const getHexMD5 = (base64MD5: WithImplicitCoercion<string>) =>
+    Buffer.from(base64MD5, 'base64').toString('hex');
 
 export const getBase64MD5 = (hexMD5: WithImplicitCoercion<string>) =>
     Buffer.from(hexMD5, 'hex').toString('base64');
-
-/**
- * Calculates the number of scaled milliseconds per day based on the given time progression factor.
- * This function is intended for testing and simulation purposes only.
- * @param {number} timeProgressionFactor - The desired time progression factor for scaling.
- * @returns {number} The number of scaled milliseconds per day.
- * If the result is 0, the minimum value of 1 millisecond is returned.
- */
-export const scaleMsPerDay = (timeProgressionFactor: number): number =>
-    Math.round(msInOneDay / (timeProgressionFactor || 1)) || 1;

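Note: both sides keep the hex/base64 MD5 converters; development additionally accepts raw Uint8Array digests via getMD5Buffer. A round-trip example:

    const hex = '9bb58f26192e4ba00f01e2e7b136bbd8';
    const b64 = getBase64MD5(hex);   // 'm7WPJhkuS6APAeLnsTa72A=='
    getHexMD5(b64) === hex;          // true
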
@@ -3,11 +3,6 @@ import * as werelogs from 'werelogs';
 import errors, { ArsenalError } from '../errors';
 import escapeForXml from './escapeForXml';
 
-export interface BucketTag {
-    Key: string;
-    Value: string;
-};
-
 const errorInvalidArgument = () => errors.InvalidArgument
     .customizeDescription('The header \'x-amz-tagging\' shall be ' +
         'encoded as UTF-8 then URLEncoded URL query parameters without ' +
@@ -37,15 +32,6 @@ export const _validator = {
         && tag.Key[0] !== undefined && tag.Value[0] !== undefined
         && typeof tag.Key[0] === 'string' && typeof tag.Value[0] === 'string',
 
-    // Allowed characters are letters, whitespace, and numbers, plus
-    // the following special characters: + - = . _ : /
-    // Maximum key length: 128 Unicode characters
-    // Maximum value length: 256 Unicode characters
-    validateTagObjectStructure: (tag: BucketTag) => tag
-        && Object.keys(tag).length === 2
-        && typeof tag.Key === 'string' && typeof tag.Value === 'string'
-        && tag.Key.length >= 1 && tag.Value.length >= 1,
-
     validateXMLStructure: (result: any) =>
         result && Object.keys(result).length === 1 &&
         result.Tagging &&
@@ -114,47 +100,12 @@ function _validateTags(tags: Array<{ Key: string[], Value: string[] }>) {
     }
     // not repeating keys
     if (tags.length > Object.keys(tagsResult).length) {
-        return errors.InvalidTag.customizeDescription(
-            'Cannot provide multiple Tags with the same key'
-        );
+        return errors.InvalidTag.customizeDescription('Cannot provide ' +
+            'multiple Tags with the same key');
     }
     return tagsResult;
 }
 
-/** areTagsValid - Validate bucket tags
- * @param tags - tags parsed from xml to be validated
- * @return result - true if the tags are valide, false otherwise
- */
-export function areTagsValid(tags: Array<BucketTag>) {
-    if (tags.length === 0) {
-        return true;
-    }
-    // Maximum number of tags per resource: 50
-    if (tags.length > 50) {
-        return false;
-    }
-
-    const tagsResult = {};
-    for (const tag of tags) {
-        if (!_validator.validateTagObjectStructure(tag)) {
-            return false;
-        }
-        const { Key: key, Value: value } = tag;
-
-        const result = _validator.validateKeyValue(key, value);
-        if (result instanceof Error) {
-            return false;
-        }
-
-        tagsResult[key] = value;
-    }
-    // not repeating keys
-    if (tags.length > Object.keys(tagsResult).length) {
-        return false;
-    }
-    return true;
-}
-
 /** parseTagXml - Parse and validate xml body, returning callback with object
  * tags : { key: value}
  * @param xml - xml body to parse and validate

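Note: the development-only areTagsValid applies the AWS bucket-tagging limits (at most 50 tags, no duplicate keys, well-formed Key/Value pairs) and reports a plain boolean rather than an error. Usage sketch:

    areTagsValid([{ Key: 'env', Value: 'prod' }]);                      // true
    areTagsValid([{ Key: 'a', Value: '1' }, { Key: 'a', Value: '2' }]); // false (duplicate key)
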
@@ -77,34 +77,6 @@ export function _checkUnmodifiedSince(
     return { present: false, error: null };
 }
 
-/**
- * checks 'if-modified-since' and 'if-unmodified-since' headers if included in
- * request against last-modified date of object
- * @param headers - headers from request object
- * @param lastModified - last modified date of object
- * @return contains modifiedSince and unmodifiedSince res objects
- */
-export function checkDateModifiedHeaders(
-    headers: http.IncomingHttpHeaders,
-    lastModified: string,
-) {
-    const lastModifiedDate = new Date(lastModified);
-    lastModifiedDate.setMilliseconds(0);
-    const millis = lastModifiedDate.getTime();
-
-    const ifModifiedSinceHeader = headers['if-modified-since'] ||
-        headers['x-amz-copy-source-if-modified-since'];
-    const ifUnmodifiedSinceHeader = headers['if-unmodified-since'] ||
-        headers['x-amz-copy-source-if-unmodified-since'];
-
-    const modifiedSinceRes = _checkModifiedSince(ifModifiedSinceHeader?.toString(),
-        millis);
-    const unmodifiedSinceRes = _checkUnmodifiedSince(ifUnmodifiedSinceHeader?.toString(),
-        millis);
-
-    return { modifiedSinceRes, unmodifiedSinceRes };
-}
-
 /**
  * validateConditionalHeaders - validates 'if-modified-since',
  * 'if-unmodified-since', 'if-match' or 'if-none-match' headers if included in
@@ -120,14 +92,21 @@ export function validateConditionalHeaders(
     lastModified: string,
     contentMD5: string,
 ): {} | { present: boolean; error: ArsenalError } {
+    const lastModifiedDate = new Date(lastModified);
+    lastModifiedDate.setMilliseconds(0);
+    const millis = lastModifiedDate.getTime();
     const ifMatchHeader = headers['if-match'] ||
         headers['x-amz-copy-source-if-match'];
     const ifNoneMatchHeader = headers['if-none-match'] ||
         headers['x-amz-copy-source-if-none-match'];
+    const ifModifiedSinceHeader = headers['if-modified-since'] ||
+        headers['x-amz-copy-source-if-modified-since'];
+    const ifUnmodifiedSinceHeader = headers['if-unmodified-since'] ||
+        headers['x-amz-copy-source-if-unmodified-since'];
     const etagMatchRes = _checkEtagMatch(ifMatchHeader?.toString(), contentMD5);
     const etagNoneMatchRes = _checkEtagNoneMatch(ifNoneMatchHeader?.toString(), contentMD5);
-    const { modifiedSinceRes, unmodifiedSinceRes } =
-        checkDateModifiedHeaders(headers, lastModified);
+    const modifiedSinceRes = _checkModifiedSince(ifModifiedSinceHeader?.toString(), millis);
+    const unmodifiedSinceRes = _checkUnmodifiedSince(ifUnmodifiedSinceHeader?.toString(), millis);
     // If-Unmodified-Since condition evaluates to false and If-Match
     // is not present, then return the error. Otherwise, If-Unmodified-Since is
     // silent when If-Match match, and when If-Match does not match, it's the

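Note: development factors the date checks into a standalone checkDateModifiedHeaders so callers can evaluate only the date conditions. A hedged sketch, assuming the res objects carry { present, error } as _checkUnmodifiedSince does above, and with req.headers and the last-modified value as illustrative inputs:

    const { modifiedSinceRes, unmodifiedSinceRes } =
        checkDateModifiedHeaders(req.headers, objectLastModified);
    if (modifiedSinceRes.present && !modifiedSinceRes.error) {
        // the If-Modified-Since condition passed; serve the full object
    }
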
@@ -1,7 +1,4 @@
 import assert from 'assert';
-
-import { RequestLogger } from 'werelogs';
-
 import errors from '../errors';
 import routeGET from './routes/routeGET';
 import routePUT from './routes/routePUT';
@@ -13,7 +10,6 @@ import * as routesUtils from './routesUtils';
 import routeWebsite from './routes/routeWebsite';
 import * as http from 'http';
 import StatsClient from '../metrics/StatsClient';
-import { objectKeyByteLimit } from '../constants';
 import * as requestUtils from '../../lib/policyEvaluator/requestUtils';
 
 const routeMap = {
@@ -67,14 +64,8 @@ function checkBucketAndKey(
             blacklistedPrefixes.object);
         if (!result.isValid) {
             log.debug('invalid object key', { objectKey });
-            if (result.invalidPrefix) {
-                return errors.InvalidArgument.customizeDescription('Invalid ' +
-                    'prefix - object key cannot start with ' +
-                    `"${result.invalidPrefix}".`);
-            }
-            return errors.KeyTooLong.customizeDescription('Object key is too ' +
-                'long. Maximum number of bytes allowed in keys is ' +
-                `${objectKeyByteLimit}.`);
+            return errors.InvalidArgument.customizeDescription('Object key ' +
+                `must not start with "${result.invalidPrefix}".`);
         }
     }
     if ((reqQuery.partNumber || reqQuery.uploadId)
@@ -219,8 +210,7 @@ export default function routes(
         // @ts-ignore
         logger.newRequestLogger());
 
-    if (!req.url!.startsWith('/_/healthcheck') &&
-        !req.url!.startsWith('/_/report')) {
+    if (!req.url!.startsWith('/_/healthcheck')) {
         log.info('received request', clientInfo);
     }
 

@@ -1,5 +1,3 @@
-import { RequestLogger } from 'werelogs';
-
 import * as routesUtils from '../routesUtils';
 import errors from '../../errors';
 import StatsClient from '../../metrics/StatsClient';
@@ -43,8 +41,6 @@ export default function routeDELETE(
                 return call('bucketDeleteEncryption');
             } else if (query?.tagging !== undefined) {
                 return call('bucketDeleteTagging');
-            } else if (query?.quota !== undefined) {
-                return call('bucketDeleteQuota');
             }
             call('bucketDelete');
         } else {

@@ -1,5 +1,3 @@
-import { RequestLogger } from 'werelogs';
-
 import * as routesUtils from '../routesUtils';
 import errors from '../../errors';
 import * as http from 'http';
@@ -58,10 +56,6 @@ export default function routerGET(
             call('bucketGetNotification');
         } else if (query.encryption !== undefined) {
             call('bucketGetEncryption');
-        } else if (query.search !== undefined) {
-            call('metadataSearch')
-        } else if (query.quota !== undefined) {
-            call('bucketGetQuota');
         } else {
             // GET bucket
             call('bucketGet');

@@ -1,5 +1,3 @@
-import { RequestLogger } from 'werelogs';
-
 import * as routesUtils from '../routesUtils';
 import errors from '../../errors';
 import StatsClient from '../../metrics/StatsClient';

@@ -1,5 +1,3 @@
-import { RequestLogger } from 'werelogs';
-
 import * as routesUtils from '../routesUtils';
 import errors from '../../errors';
 import * as http from 'http';

@@ -1,5 +1,3 @@
-import { RequestLogger } from 'werelogs';
-
 import * as routesUtils from '../routesUtils';
 import errors from '../../errors';
 import * as http from 'http';
@@ -58,14 +56,6 @@ export default function routePOST(
                     corsHeaders));
     }
 
-    // POST Object restore
-    if (query.restore !== undefined) {
-        return api.callApiMethod('objectRestore', request, response,
-            log, (err, statusCode, resHeaders) =>
-                routesUtils.responseNoBody(err, resHeaders, response,
-                    statusCode, log));
-    }
-
     return routesUtils.responseNoBody(errors.NotImplemented, null, response,
         200, log);
 }

@@ -1,5 +1,3 @@
-import { RequestLogger } from 'werelogs';
-
 import * as routesUtils from '../routesUtils';
 import errors from '../../errors';
 import * as http from 'http';
@@ -105,13 +103,6 @@ export default function routePUT(
                     return routesUtils.responseNoBody(err, corsHeaders,
                         response, 200, log);
                 });
-        } else if (query.quota !== undefined) {
-            api.callApiMethod('bucketUpdateQuota', request, response,
-                log, (err, resHeaders) => {
-                    routesUtils.statsReport500(err, statsClient);
-                    return routesUtils.responseNoBody(err, resHeaders, response,
-                        200, log);
-                });
         } else {
             // PUT bucket
             return api.callApiMethod('bucketPut', request, response, log,

@@ -1,5 +1,3 @@
-import { RequestLogger } from 'werelogs';
-
 import * as routesUtils from '../routesUtils';
 import errors from '../../errors';
 import * as http from 'http';
@@ -11,7 +9,7 @@ export default function routerWebsite(
     api: { callApiMethod: routesUtils.CallApiMethod },
     log: RequestLogger,
     statsClient?: StatsClient,
-    dataRetrievalParams?: any,
+    dataRetrievalFn?: any,
 ) {
     const { bucketName, query } = request as any
     log.debug('routing request', { method: 'routerWebsite' });
@@ -29,11 +27,6 @@ export default function routerWebsite(
                 routesUtils.statsReport500(err, statsClient);
                 // request being redirected
                 if (redirectInfo) {
-                    if (err && redirectInfo.withError) {
-                        return routesUtils.redirectRequestOnError(err,
-                            'GET', redirectInfo, dataGetInfo, dataRetrievalParams,
-                            response, resMetaHeaders, log)
-                    }
                     // note that key might have been modified in websiteGet
                     // api to add index document
                     return routesUtils.redirectRequest(redirectInfo,
@@ -45,7 +38,7 @@ export default function routerWebsite(
                 // user has their own error page
                 if (err && dataGetInfo) {
                     return routesUtils.streamUserErrorPage(err, dataGetInfo,
-                        dataRetrievalParams, response, resMetaHeaders, log);
+                        dataRetrievalFn, response, resMetaHeaders, log);
                 }
                 // send default error html response
                 if (err) {
@@ -55,7 +48,7 @@ export default function routerWebsite(
                 }
                 // no error, stream data
                 return routesUtils.responseStreamData(null, query,
-                    resMetaHeaders, dataGetInfo, dataRetrievalParams, response,
+                    resMetaHeaders, dataGetInfo, dataRetrievalFn, response,
                     undefined, log);
             });
     }
@@ -64,11 +57,6 @@ export default function routerWebsite(
             (err, resMetaHeaders, redirectInfo, key) => {
                 routesUtils.statsReport500(err, statsClient);
                 if (redirectInfo) {
-                    if (err && redirectInfo.withError) {
-                        return routesUtils.redirectRequestOnError(err,
-                            'HEAD', redirectInfo, null, dataRetrievalParams,
-                            response, resMetaHeaders, log)
-                    }
                     return routesUtils.redirectRequest(redirectInfo,
                         // TODO ARSN-217 encrypted does not exists in request.connection
                         // @ts-ignore

@@ -1,16 +1,11 @@
 import * as url from 'url';
-import * as http from 'http';
-import { eachSeries } from 'async';
-
-import { RequestLogger } from 'werelogs';
-
 import * as ipCheck from '../ipCheck';
 import errors, { ArsenalError } from '../errors';
 import * as constants from '../constants';
+import { eachSeries } from 'async';
 import DataWrapper from '../storage/data/DataWrapper';
+import * as http from 'http';
 import StatsClient from '../metrics/StatsClient';
-import { objectKeyByteLimit } from '../constants';
-const jsutil = require('../jsutil');
 
 export type CallApiMethod = (
     methodName: string,
@@ -149,15 +144,6 @@ const XMLResponseBackend = {
             '<Error>',
             `<Code>${errCode.message}</Code>`,
             `<Message>${errCode.description}</Message>`,
-        );
-        const invalidArguments = errCode.metadata.get('invalidArguments') || [];
-        invalidArguments.forEach((invalidArgument, index) => {
-            const counter = index + 1;
-            const { ArgumentName, ArgumentValue } = invalidArgument as any;
-            xml.push(`<ArgumentName${counter}>${ArgumentName}</ArgumentName${counter}>`);
-            xml.push(`<ArgumentValue${counter}>${ArgumentValue}</ArgumentValue${counter}>`);
-        });
-        xml.push(
             '<Resource></Resource>',
             `<RequestId>${log.getSerializedUids()}</RequestId>`,
             '</Error>',
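The removed XML branch splices numbered `ArgumentName<n>`/`ArgumentValue<n>` elements between `<Message>` and `<Resource>` whenever the error's metadata carries an `invalidArguments` list. A self-contained sketch of that rendering — the error contents here are hypothetical:

// Build the same numbered elements the `-` side pushes into the xml array.
const invalidArguments = [
    { ArgumentName: 'x-amz-acl', ArgumentValue: 'public' },
    { ArgumentName: 'x-amz-storage-class', ArgumentValue: 'COLD' },
];
const xml: string[] = ['<Error>', '<Code>InvalidArgument</Code>'];
invalidArguments.forEach((invalidArgument, index) => {
    const counter = index + 1; // elements are numbered from 1
    const { ArgumentName, ArgumentValue } = invalidArgument;
    xml.push(`<ArgumentName${counter}>${ArgumentName}</ArgumentName${counter}>`);
    xml.push(`<ArgumentValue${counter}>${ArgumentValue}</ArgumentValue${counter}>`);
});
xml.push('</Error>');
// xml.join('') =>
//   '<Error><Code>InvalidArgument</Code>
//    <ArgumentName1>x-amz-acl</ArgumentName1><ArgumentValue1>public</ArgumentValue1>
//    <ArgumentName2>x-amz-storage-class</ArgumentName2><ArgumentValue2>COLD</ArgumentValue2>
//    </Error>'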
@@ -227,18 +213,9 @@ const JSONResponseBackend = {
             "requestId": "4442587FB7D0A2F9"
         }
         */
-        const invalidArguments = errCode.metadata.get('invalidArguments') || [];
-        const invalids = invalidArguments.reduce((acc, invalidArgument, index) => {
-            const counter = index + 1;
-            const { ArgumentName, ArgumentValue } = invalidArgument as any;
-            const name = `ArgumentName${counter}`;
-            const value = `ArgumentValue${counter}`;
-            return { ...acc, [name]: ArgumentName, [value]: ArgumentValue };
-        }, {});
         const data = JSON.stringify({
             code: errCode.message,
             message: errCode.description,
-            ...invalids,
             resource: null,
             requestId: log.getSerializedUids(),
         });
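The JSON backend's removed block flattens the same metadata into numbered top-level keys of the error body via a reduce. A sketch, using the same hypothetical input as the XML example above:

const invalidArguments = [
    { ArgumentName: 'x-amz-acl', ArgumentValue: 'public' },
];
const invalids = invalidArguments.reduce((acc, invalidArgument, index) => {
    const counter = index + 1;
    const { ArgumentName, ArgumentValue } = invalidArgument;
    return {
        ...acc,
        [`ArgumentName${counter}`]: ArgumentName,
        [`ArgumentValue${counter}`]: ArgumentValue,
    };
}, {} as Record<string, string>);
// invalids => { ArgumentName1: 'x-amz-acl', ArgumentValue1: 'public' },
// spread into the JSON.stringify({ code, message, ...invalids, ... }) above.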
@@ -385,18 +362,12 @@ function retrieveData(
         response.destroy();
         responseDestroyed = true;
     };
-
-    const _destroyReadable = (readable: http.IncomingMessage | null) => {
-        // s3-data sends Readable stream only which does not implement destroy
-        if (readable && readable.destroy) {
-            readable.destroy();
-        }
-    };
-
     // the S3-client might close the connection while we are processing it
     response.once('close', () => {
         responseDestroyed = true;
-        _destroyReadable(currentStream);
+        if (currentStream) {
+            currentStream.destroy();
+        }
     });
 
     const {
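The deleted `_destroyReadable` helper exists, per its own comment, because some data backends hand back a bare Readable-style object with no `destroy()` method, so the `+` side's direct `destroy()` calls (here and in the -430 hunk below) rely on the stream implementing it. A standalone sketch of the guard:

import type { Readable } from 'stream';

// Destroy a stream only if it actually implements destroy().
function destroyIfPossible(readable: Readable | null): void {
    if (readable && typeof readable.destroy === 'function') {
        readable.destroy();
    }
}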
@@ -413,7 +384,6 @@ function retrieveData(
     return eachSeries(locations,
         (current, next) => data.get(current, response, log,
             (err: any, readable: http.IncomingMessage) => {
-                const cbOnce = jsutil.once(next);
                 // NB: readable is of IncomingMessage type
                 if (err) {
                     log.error('failed to get object', {
@@ -421,7 +391,7 @@ function retrieveData(
                         method: 'retrieveData',
                     });
                     _destroyResponse();
-                    return cbOnce(err);
+                    return next(err);
                 }
                 // response.isclosed is set by the S3 server. Might happen if
                 // the S3-client closes the connection before the first request
@@ -430,24 +400,24 @@ function retrieveData(
                 if (responseDestroyed || response.isclosed) {
                     log.debug(
                         'response destroyed before readable could stream');
-                    _destroyReadable(readable);
+                    readable.destroy();
                     const responseErr = new Error();
                     // @ts-ignore
                     responseErr.code = 'ResponseError';
                     responseErr.message = 'response closed by client request before all data sent';
-                    return cbOnce(responseErr);
+                    return next(responseErr);
                 }
                 // readable stream successfully consumed
                 readable.on('end', () => {
                     currentStream = null;
                     log.debug('readable stream end reached');
-                    return cbOnce();
+                    return next();
                 });
                 // errors on server side with readable stream
                 readable.on('error', err => {
                     log.error('error piping data from source');
                     _destroyResponse();
-                    return cbOnce(err);
+                    return next(err);
                 });
                 currentStream = readable;
                 return readable.pipe(response, { end: false });
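Throughout `retrieveData`, the `-` side wraps `eachSeries`'s per-item callback as `cbOnce = jsutil.once(next)`. The point: `next` is reachable from several handlers ('end', 'error', plus the early-return paths), and calling it twice would advance the series twice and corrupt the concatenated output stream. The real `../jsutil` implementation is not shown in this diff; a minimal sketch of the idea:

// Wrap a callback so that only its first invocation has any effect.
function once<T extends (...args: any[]) => unknown>(fn: T): T {
    let called = false;
    return ((...args: any[]) => {
        if (called) {
            return undefined;
        }
        called = true;
        return fn(...args);
    }) as T;
}

// Usage mirroring the hunks above:
//   const cbOnce = once(next);
//   readable.on('end', () => cbOnce());
//   readable.on('error', err => cbOnce(err));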
@@ -694,8 +664,6 @@ export function streamUserErrorPage(
     log: RequestLogger,
 ) {
     setCommonResponseHeaders(corsHeaders, response, log);
-    response.setHeader('x-amz-error-code', err.message);
-    response.setHeader('x-amz-error-message', err.description);
     response.writeHead(err.code, { 'Content-type': 'text/html' });
     response.on('finish', () => {
         // TODO ARSN-216 Fix logger
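The two removed `setHeader` calls surface the S3 error on the response even when the body is the user's own error document; `x-amz-error-code` and `x-amz-error-message` are the headers S3 static website hosting uses for this. A minimal sketch — the error shape follows ArsenalError's `message`/`description` usage in this diff:

import * as http from 'http';

function setWebsiteErrorHeaders(
    response: http.ServerResponse,
    err: { message: string; description: string },
): void {
    response.setHeader('x-amz-error-code', err.message); // e.g. 'NoSuchKey'
    response.setHeader('x-amz-error-message', err.description);
}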
@@ -878,7 +846,7 @@ export function redirectRequest(
     }
     let redirectLocation = justPath ? `/${redirectKey}` :
         `${redirectProtocol}://${redirectHostName}/${redirectKey}`;
-    if (!redirectKey && redirectLocationHeader && redirectLocation !== '/') {
+    if (!redirectKey && redirectLocationHeader) {
         // remove hanging slash
         redirectLocation = redirectLocation.slice(0, -1);
     }
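The extra `redirectLocation !== '/'` condition on the `-` side protects the root-redirect case: with no `redirectKey`, a location of just '/' would otherwise have its "hanging slash" trimmed away, leaving an empty Location header. A quick illustration:

// Trimming the trailing slash is only safe when something precedes it.
console.log('/photos/'.slice(0, -1)); // '/photos' — intended cleanup
console.log('/'.slice(0, -1));        // ''        — empty Location header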
@@ -895,52 +863,6 @@ export function redirectRequest(
     return undefined;
 }
-
-/**
- * redirectRequestOnError - redirect with an error body
- * @param err - arsenal error object
- * @param method - HTTP method
- * @param routingInfo - info for routing
- * @param [routingInfo.withError] - flag to differentiate from routing rules
- * @param [routingInfo.location] - location header
- * @param dataLocations --
- *   - array of locations to get streams from backend
- * @param retrieveDataParams - params to create instance of
- *   data retrieval function
- * @param response - response object
- * @param corsHeaders - CORS-related response headers
- * @param log - Werelogs instance
- */
-export function redirectRequestOnError(
-    err: ArsenalError,
-    method: 'HEAD' | 'GET',
-    routingInfo: {
-        withError: true;
-        location: string;
-    },
-    dataLocations: { size: string | number }[] | null,
-    retrieveDataParams: any,
-    response: http.ServerResponse,
-    corsHeaders: { [key: string]: string },
-    log: RequestLogger,
-) {
-    response.setHeader('Location', routingInfo.location);
-
-    if (!dataLocations && err.is.Found) {
-        if (method === 'HEAD') {
-            return errorHeaderResponse(err, response, corsHeaders, log);
-        }
-        response.setHeader('x-amz-error-code', err.message);
-        response.setHeader('x-amz-error-message', err.description);
-        return errorHtmlResponse(err, false, '', response, corsHeaders, log);
-    }
-
-    // This is reached only for website error document (GET only)
-    const overrideErrorCode = err.flatten();
-    overrideErrorCode.code = 301;
-    return streamUserErrorPage(ArsenalError.unflatten(overrideErrorCode)!,
-        dataLocations || [], retrieveDataParams, response, corsHeaders, log);
-}
 
 /**
  * Get bucket name and object name from the request
  * @param request - http request object
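The removed `redirectRequestOnError` ends by downgrading the error's HTTP code to 301 before streaming the website error document, so the client sees a redirect status while still receiving the error page. A sketch of that round-trip, assuming `flatten()`/`unflatten()` convert an ArsenalError to and from a plain object, as their use here suggests:

import { ArsenalError } from '../errors';

function toRedirectError(err: ArsenalError): ArsenalError {
    const flat = err.flatten();
    flat.code = 301; // serve the error page under a redirect status
    return ArsenalError.unflatten(flat)!;
}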
@@ -1155,9 +1077,6 @@ export function isValidObjectKey(objectKey: string, prefixBlacklist: string[]) {
     if (invalidPrefix) {
         return { isValid: false, invalidPrefix };
     }
-    if (Buffer.byteLength(objectKey, 'utf8') > objectKeyByteLimit) {
-        return { isValid: false };
-    }
     return { isValid: true };
 }
 
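The removed check measures the key in UTF-8 bytes rather than characters, because `String.length` undercounts multi-byte keys (its `objectKeyByteLimit` import was dropped from the top of the file in the same diff). An illustration, with an assumed limit of 1024 bytes:

const objectKeyByteLimit = 1024; // assumed value, for illustration only
const key = 'caf\u00e9'.repeat(250); // 1000 characters, 1250 UTF-8 bytes
console.log(key.length > objectKeyByteLimit);                     // false — looks valid
console.log(Buffer.byteLength(key, 'utf8') > objectKeyByteLimit); // true — rejected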
@@ -2,8 +2,6 @@ const async = require('async');
 const PassThrough = require('stream').PassThrough;
 const assert = require('assert');
 
-const { Logger } = require('werelogs');
-
 const errors = require('../../errors').default;
 const MD5Sum = require('../../s3middleware/MD5Sum').default;
 const NullStream = require('../../s3middleware/nullStream').default;
@@ -29,7 +27,6 @@ class DataWrapper {
         this.metadata = metadata;
         this.locStorageCheckFn = locStorageCheckFn;
         this.vault = vault;
-        this.logger = new Logger('DataWrapper');
     }
 
     put(cipherBundle, value, valueSize, keyContext, backendInfo, log, cb) {
@@ -130,7 +127,7 @@ class DataWrapper {
     }
 
     delete(objectGetInfo, log, cb) {
-        const callback = cb || (() => {});
+        const callback = cb || log.end;
        const isMdModelVersion2 = typeof(objectGetInfo) === 'string';
         const isRequiredStringKey =
             constants.clientsRequireStringKey[this.implName];
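Note the changed fallback: with `cb || log.end`, a caller that omits the callback routes the delete outcome, error included, straight into the request logger's `end()`; the `-` side's no-op default decouples the two. A self-contained illustration, with hypothetical shapes:

type Cb = (err?: Error) => void;

function doDelete(log: { end: Cb }, cb?: Cb): void {
    const callback: Cb = cb || (() => {}); // `-` side: omitted cb is a no-op
    // const callback: Cb = cb || log.end; // `+` side: omitted cb ends the logger
    callback(undefined); // report completion
}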
@@ -179,9 +176,7 @@ class DataWrapper {
             newObjDataStoreName)) {
             return process.nextTick(cb);
         }
-        const delLog = this.logger.newRequestLoggerFromSerializedUids(
-            log.getSerializedUids());
-        delLog.trace('initiating batch delete', {
+        log.trace('initiating batch delete', {
             keys: locations,
             implName: this.implName,
             method: 'batchDelete',
@@ -207,21 +202,21 @@ class DataWrapper {
             return false;
         });
         if (shouldBatchDelete && keys.length > 1) {
-            return this.client.batchDelete(backendName, { keys }, delLog, cb);
+            return this.client.batchDelete(backendName, { keys }, log, cb);
         }
         return async.eachLimit(locations, 5, (loc, next) => {
-            process.nextTick(() => this.delete(loc, delLog, next));
+            process.nextTick(() => this.delete(loc, log, next));
         },
         err => {
             if (err) {
-                delLog.end().error('batch delete failed', { error: err });
+                log.end().error('batch delete failed', { error: err });
                 // deletion of non-existing objects result in 204
                 if (err.code === 404) {
                     return cb();
                 }
                 return cb(err);
             }
-            delLog.end().trace('batch delete successfully completed');
+            log.end().trace('batch delete successfully completed');
             return cb();
         });
     }
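The last three hunks are one change: the `-` side builds a dedicated `delLog` for the batch delete, derived from the caller's serialized UIDs, so `delLog.end()` closes only the delete-scoped logger; the `+` side calls `log.end()` on the caller's own request logger. A sketch of the pattern, with both werelogs calls taken verbatim from the hunks above (logger types elided):

import { Logger } from 'werelogs';

const logger = new Logger('DataWrapper');

// Same request UIDs, independent lifecycle: ending this logger cannot
// terminate the caller's logger mid-request.
function makeDeleteLogger(log: { getSerializedUids(): string }) {
    return logger.newRequestLoggerFromSerializedUids(log.getSerializedUids());
}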
@@ -989,14 +984,13 @@ class DataWrapper {
         return this.client.delete(objectGetInfo, log.getSerializedUids(),
             err => {
                 if (err) {
-                    // TODO: sproxydclient and hdclient does not return standard Arsenal error yet.
-                    if (err.code === 404) {
+                    if (err.is.ObjNotFound) {
                         log.info('no such key in datastore', {
                             objectGetInfo,
                             implName: this.implName,
                             moreRetries: 'no',
                         });
-                        return cb(errors.ObjNotFound);
+                        return cb(err);
                     }
                     log.error('delete error from datastore', {
                         error: err,
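The `-` side's TODO explains the shape of this change: sproxydclient and hdclient do not return standard Arsenal errors yet, so the development branch matches on the raw `code === 404` and re-wraps it as `errors.ObjNotFound` for callers, while the `+` side assumes an ArsenalError exposing an `is.ObjNotFound` predicate. A sketch of the normalization, with a hypothetical error shape:

import errors from '../../errors';

type RawClientError = { code?: number; is?: { ObjNotFound?: boolean } };

// Map both legacy (numeric 404) and standard Arsenal not-found errors onto
// the canonical errors.ObjNotFound before they reach callers.
function normalizeDeleteError(err: RawClientError) {
    if (err.is?.ObjNotFound || err.code === 404) {
        return errors.ObjNotFound;
    }
    return err;
}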
Some files were not shown because too many files have changed in this diff.